ngram
listlengths
0
67.8k
[ "self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s in [8, 16, 32, 48, 64, 128]]", "primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports = \"\"\" import numpy as np from libc.stdint cimport", "64, 128]] type_instantiations = primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports = \"\"\" import numpy as", "\"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def fixed_dtype(self): return None", "\"Opaque{}\".format(self.size) @property def element_py_type(self): return \"StructInstance\" @property def by_pointer(self): return True @property def", "type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property def fixed_dtype(self):", "class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def fixed_dtype(self): return None @property def element_c_type(self): return", "return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s in [8, 16, 32, 48, 64,", "return True @property def type_key(self): return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s in", "OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def fixed_dtype(self): return None @property def element_c_type(self): return \"Opaque{}\".format(self.size)", "import namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type:", "dynamic) @property def type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( 
namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ):", "int64_t, uint64_t, int32_t, uint32_t \"\"\" exports = dict( primitive_type_instantiations=primitive_type_instantiations, opaque_type_instantiations=opaque_type_instantiations, type_instantiations=type_instantiations, type_instantiation_imports=type_instantiation_imports, )", "128]] type_instantiations = primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports = \"\"\" import numpy as np", "return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str by_pointer: bool fixed_dtype: str", "or None type_key: str def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def", "identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str by_pointer: bool fixed_dtype:", "collections import namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta):", "class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property def fixed_dtype(self): return self.element_py_type @property", "True @property def type_key(self): return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s in [8,", "fixed_dtype(self): return None @property def element_c_type(self): return \"Opaque{}\".format(self.size) @property def element_py_type(self): return \"StructInstance\"", "@property def type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property", "self.element_py_type 
@property def by_pointer(self): return False @property def type_key(self): return self.element_py_type primitive_type_instantiations =", "dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\",", "[\"size\"]), TypeInstantiation): @property def fixed_dtype(self): return None @property def element_c_type(self): return \"Opaque{}\".format(self.size) @property", "@property def fixed_dtype(self): return self.element_py_type @property def by_pointer(self): return False @property def type_key(self):", "PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ]", "s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str by_pointer: bool fixed_dtype: str or None", "import ABCMeta from collections import namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\",", "or dynamic) @property def type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation,", "): @property def fixed_dtype(self): return self.element_py_type @property def by_pointer(self): return False @property def", "def type_key(self): return self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"),", "@property def element_py_type(self): return \"StructInstance\" @property 
def by_pointer(self): return True @property def type_key(self):", "return self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"),", "element_py_type: str by_pointer: bool fixed_dtype: str or None type_key: str def dtype(self, dynamic):", "fixed_dtype(self): return self.element_py_type @property def by_pointer(self): return False @property def type_key(self): return self.element_py_type", "\"StructInstance\" @property def by_pointer(self): return True @property def type_key(self): return self.size opaque_type_instantiations =", "PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation):", "from collections import namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class", "\"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]),", "return \"StructInstance\" @property def by_pointer(self): return True @property def type_key(self): return self.size opaque_type_instantiations", "s in [8, 16, 32, 48, 64, 128]] type_instantiations = primitive_type_instantiations + opaque_type_instantiations", "dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation(", "None @property def 
element_c_type(self): return \"Opaque{}\".format(self.size) @property def element_py_type(self): return \"StructInstance\" @property def", "@property def element_c_type(self): return \"Opaque{}\".format(self.size) @property def element_py_type(self): return \"StructInstance\" @property def by_pointer(self):", "primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"),", "TypeInstantiation, ): @property def fixed_dtype(self): return self.element_py_type @property def by_pointer(self): return False @property", "str element_py_type: str by_pointer: bool fixed_dtype: str or None type_key: str def dtype(self,", "PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def fixed_dtype(self): return", "return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\",", "namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str", "@property def by_pointer(self): return False @property def type_key(self): return self.element_py_type primitive_type_instantiations = [", "return False @property def type_key(self): return self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\",", "type_instantiations = primitive_type_instantiations + opaque_type_instantiations 
type_instantiation_imports = \"\"\" import numpy as np from", "str by_pointer: bool fixed_dtype: str or None type_key: str def dtype(self, dynamic): return", "ABCMeta from collections import namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s)", "NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str by_pointer: bool fixed_dtype: str or", "TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str by_pointer: bool fixed_dtype: str or None type_key: str", "@property def by_pointer(self): return True @property def type_key(self): return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s)", "as np from libc.stdint cimport int64_t, uint64_t, int32_t, uint32_t \"\"\" exports = dict(", "<reponame>chakpongchung/katana import re from abc import ABCMeta from collections import namedtuple NON_IDENTIFIER_CHAR_RE =", "str or None type_key: str def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property", "fixed_dtype: str or None type_key: str def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic)", "def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str by_pointer: bool", "\"\"\" import numpy as np from libc.stdint cimport int64_t, uint64_t, int32_t, uint32_t \"\"\"", "re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str by_pointer:", "[\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property def fixed_dtype(self): return self.element_py_type @property def by_pointer(self): return", "abc import ABCMeta from collections import namedtuple NON_IDENTIFIER_CHAR_RE = 
re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return", "= \"\"\" import numpy as np from libc.stdint cimport int64_t, uint64_t, int32_t, uint32_t", "\"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def fixed_dtype(self):", "return None @property def element_c_type(self): return \"Opaque{}\".format(self.size) @property def element_py_type(self): return \"StructInstance\" @property", "32, 48, 64, 128]] type_instantiations = primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports = \"\"\" import", "numpy as np from libc.stdint cimport int64_t, uint64_t, int32_t, uint32_t \"\"\" exports =", "= primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports = \"\"\" import numpy as np from libc.stdint", "\"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class", "def by_pointer(self): return False @property def type_key(self): return self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\",", "TypeInstantiation): @property def fixed_dtype(self): return None @property def element_c_type(self): return \"Opaque{}\".format(self.size) @property def", "def by_pointer(self): return True @property def type_key(self): return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for", "False @property def type_key(self): return self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"),", "[ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), 
PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"),", "[8, 16, 32, 48, 64, 128]] type_instantiations = primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports =", "opaque_type_instantiations type_instantiation_imports = \"\"\" import numpy as np from libc.stdint cimport int64_t, uint64_t,", "libc.stdint cimport int64_t, uint64_t, int32_t, uint32_t \"\"\" exports = dict( primitive_type_instantiations=primitive_type_instantiations, opaque_type_instantiations=opaque_type_instantiations, type_instantiations=type_instantiations,", "[OpaqueTypeInstantiation(s) for s in [8, 16, 32, 48, 64, 128]] type_instantiations = primitive_type_instantiations", "def type_key(self): return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s in [8, 16, 32,", "from abc import ABCMeta from collections import namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s):", "@property def fixed_dtype(self): return None @property def element_c_type(self): return \"Opaque{}\".format(self.size) @property def element_py_type(self):", "type_key: str def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def type_scab(self): return", "by_pointer(self): return True @property def type_key(self): return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s", "self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\",", "from libc.stdint cimport int64_t, uint64_t, int32_t, uint32_t \"\"\" exports = 
dict( primitive_type_instantiations=primitive_type_instantiations, opaque_type_instantiations=opaque_type_instantiations,", "] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def fixed_dtype(self): return None @property def element_c_type(self):", "np from libc.stdint cimport int64_t, uint64_t, int32_t, uint32_t \"\"\" exports = dict( primitive_type_instantiations=primitive_type_instantiations,", "None type_key: str def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def type_scab(self):", "def element_py_type(self): return \"StructInstance\" @property def by_pointer(self): return True @property def type_key(self): return", "= [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\",", "import re from abc import ABCMeta from collections import namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\")", "opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s in [8, 16, 32, 48, 64, 128]] type_instantiations", "element_py_type(self): return \"StructInstance\" @property def by_pointer(self): return True @property def type_key(self): return self.size", "return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property def fixed_dtype(self): return", "@property def type_key(self): return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s in [8, 16,", "PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), 
PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\",", "48, 64, 128]] type_instantiations = primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports = \"\"\" import numpy", "by_pointer: bool fixed_dtype: str or None type_key: str def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype", "= re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str", "16, 32, 48, 64, 128]] type_instantiations = primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports = \"\"\"", "PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property def fixed_dtype(self): return self.element_py_type @property def", "def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def type_scab(self): return identifier_for_string(self.element_c_type) class", "def fixed_dtype(self): return None @property def element_c_type(self): return \"Opaque{}\".format(self.size) @property def element_py_type(self): return", "element_c_type(self): return \"Opaque{}\".format(self.size) @property def element_py_type(self): return \"StructInstance\" @property def by_pointer(self): return True", "\"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def fixed_dtype(self): return None @property def", "cimport int64_t, uint64_t, int32_t, uint32_t \"\"\" exports = dict( primitive_type_instantiations=primitive_type_instantiations, opaque_type_instantiations=opaque_type_instantiations, type_instantiations=type_instantiations, type_instantiation_imports=type_instantiation_imports,", "return 
\"Opaque{}\".format(self.size) @property def element_py_type(self): return \"StructInstance\" @property def by_pointer(self): return True @property", "type_key(self): return self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\",", "by_pointer(self): return False @property def type_key(self): return self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"),", "type_key(self): return self.size opaque_type_instantiations = [OpaqueTypeInstantiation(s) for s in [8, 16, 32, 48,", "in [8, 16, 32, 48, 64, 128]] type_instantiations = primitive_type_instantiations + opaque_type_instantiations type_instantiation_imports", "NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def identifier_for_string(s): return NON_IDENTIFIER_CHAR_RE.sub(\"_\", s) class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type:", "def element_c_type(self): return \"Opaque{}\".format(self.size) @property def element_py_type(self): return \"StructInstance\" @property def by_pointer(self): return", "class TypeInstantiation(metaclass=ABCMeta): element_c_type: str element_py_type: str by_pointer: bool fixed_dtype: str or None type_key:", "bool fixed_dtype: str or None type_key: str def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or", "str def dtype(self, dynamic): return \"np.dtype({})\".format(self.fixed_dtype or dynamic) @property def type_scab(self): return identifier_for_string(self.element_c_type)", "\"element_py_type\",]), TypeInstantiation, ): @property def fixed_dtype(self): return self.element_py_type @property def by_pointer(self): return False", "def fixed_dtype(self): return self.element_py_type @property def by_pointer(self): return False @property def type_key(self): return", "@property 
def type_key(self): return self.element_py_type primitive_type_instantiations = [ PrimitiveTypeInstantiation(\"uint64_t\", \"np.uint64\"), PrimitiveTypeInstantiation(\"int64_t\", \"int\"), PrimitiveTypeInstantiation(\"uint32_t\",", "def type_scab(self): return identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property def", "for s in [8, 16, 32, 48, 64, 128]] type_instantiations = primitive_type_instantiations +", "return self.element_py_type @property def by_pointer(self): return False @property def type_key(self): return self.element_py_type primitive_type_instantiations", "namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property def fixed_dtype(self): return self.element_py_type @property def by_pointer(self):", "= [OpaqueTypeInstantiation(s) for s in [8, 16, 32, 48, 64, 128]] type_instantiations =", "PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def", "\"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property", "\"int\"), PrimitiveTypeInstantiation(\"uint32_t\", \"np.uint32\"), PrimitiveTypeInstantiation(\"int32_t\", \"np.int32\"), PrimitiveTypeInstantiation(\"double\", \"float\"), PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]),", "PrimitiveTypeInstantiation(\"float\", \"np.float32\"), ] class 
OpaqueTypeInstantiation(namedtuple(\"OpaqueTypeInstantiation\", [\"size\"]), TypeInstantiation): @property def fixed_dtype(self): return None @property", "import numpy as np from libc.stdint cimport int64_t, uint64_t, int32_t, uint32_t \"\"\" exports", "re from abc import ABCMeta from collections import namedtuple NON_IDENTIFIER_CHAR_RE = re.compile(r\"[^a-zA-Z0-9]\") def", "+ opaque_type_instantiations type_instantiation_imports = \"\"\" import numpy as np from libc.stdint cimport int64_t,", "identifier_for_string(self.element_c_type) class PrimitiveTypeInstantiation( namedtuple(\"PrimitiveTypeInstantiation\", [\"element_c_type\", \"element_py_type\",]), TypeInstantiation, ): @property def fixed_dtype(self): return self.element_py_type", "type_instantiation_imports = \"\"\" import numpy as np from libc.stdint cimport int64_t, uint64_t, int32_t,", "element_c_type: str element_py_type: str by_pointer: bool fixed_dtype: str or None type_key: str def" ]
[ "SSHCertificateParserError(Exception): pass class UnsupportedKeyTypeError(SSHCertificateParserError): \"\"\"This key has a type which we do not", "<reponame>thinkwelltwd/aspen_ssh class SSHCertificateParserError(Exception): pass class UnsupportedKeyTypeError(SSHCertificateParserError): \"\"\"This key has a type which we", "a type which we do not know how to parse\"\"\" class InputTooShortError(SSHCertificateParserError): pass", "\"\"\"This key has a type which we do not know how to parse\"\"\"", "class UnsupportedKeyTypeError(SSHCertificateParserError): \"\"\"This key has a type which we do not know how", "key has a type which we do not know how to parse\"\"\" class", "has a type which we do not know how to parse\"\"\" class InputTooShortError(SSHCertificateParserError):", "pass class UnsupportedKeyTypeError(SSHCertificateParserError): \"\"\"This key has a type which we do not know", "UnsupportedKeyTypeError(SSHCertificateParserError): \"\"\"This key has a type which we do not know how to", "class SSHCertificateParserError(Exception): pass class UnsupportedKeyTypeError(SSHCertificateParserError): \"\"\"This key has a type which we do" ]
[ "import webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent import content_convert_agents import logging import argparse import", "os import sys _TEST_AGENT_DICT = { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\":", "save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue if use_base_class or not agent.skip(): agent.actions() return agent", "any agent') def SetLogger(logging_level, filepath): log = logging.getLogger() filename = filepath.split('/')[-1] f =", "whole_path = os.path.join(dirpath, filename) agent = ConvertFile( java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level,", "default=False, action='store_true', help='Save as a new file') argument_parser.add_argument( '-a', '--agent', help='Specify the agent", "logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser, agents, save_as_new=arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) if __name__ ==", "arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser, agents, save_as_new=arguments.save_as_new,", "len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger')", "logging_level = logging.DEBUG if arguments.list_agents: print('Available agents and description:\\n') for agent, agent_class in", "argument_parser.add_argument('-v', '--verbose', help='Log info', 
action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List all available agents', action='store_true',", "help='Use another base class to convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do not skip", "%(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False if len(log.handlers) > 0:", "'-a', '--agent', help='Specify the agent for the current file', default='all') arguments = argument_parser.parse_args(sys.argv[1:])", "argparse import os import sys _TEST_AGENT_DICT = { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\":", "chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent,", "whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False):", "agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser, agents, save_as_new=arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class)", "logger.error('Failed to match to any agent') def SetLogger(logging_level, filepath): log = logging.getLogger() filename", "import logging import argparse import os import sys _TEST_AGENT_DICT = { \"chrome-base-test-case\": 
chrome_convert_agents.ChromeActivityBaseCaseAgent,", "\"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\":", "agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue if use_base_class or not agent.skip(): agent.actions() return", "chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent,", "agent') def SetLogger(logging_level, filepath): log = logging.getLogger() filename = filepath.split('/')[-1] f = logging.Formatter(", "= os.path.join(dirpath, filename) agent = ConvertFile( java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class)", "ConvertFile( java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path, save_as_new,", "'--java-file', help='Java file') argument_parser.add_argument('-d', '--directory', help='Directory where all java file lives') argument_parser.add_argument('-v', '--verbose',", "= logging.DEBUG if arguments.list_agents: print('Available agents and 
description:\\n') for agent, agent_class in _AGENT_DICT.iteritems():", "for (dirpath, _, filenames) in os.walk(directory): for filename in filenames: whole_path = os.path.join(dirpath,", "for agent_class in [_AGENT_DICT[i] for i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser,", "if arguments.java_file and arguments.directory: raise Exception( 'Can not specify --jave-file and --directory at", "= logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger) def main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class',", "arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser, agents, save_as_new=arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) if __name__", "= _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent =", "CreateJavaParser() if arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser,", "SetLogger(logging_level, whole_path) agent = previous_agent for agent_class in [_AGENT_DICT[i] for i in agent_strings", "\"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, 
\"tab-model-selector-observer-test\":", "filenames: whole_path = os.path.join(dirpath, filename) agent = ConvertFile( java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent,", "if agent._failed_to_parse: continue if use_base_class or not agent.skip(): agent.actions() return agent logger.error('Failed to", "another base class to convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do not skip the", "chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent,", "_, filenames) in os.walk(directory): for filename in filenames: whole_path = os.path.join(dirpath, filename) agent", "import content_convert_agents import logging import argparse import os import sys _TEST_AGENT_DICT = {", "+ ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False if len(log.handlers)", "= previous_agent for agent_class in [_AGENT_DICT[i] for i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent", "use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser, agents, save_as_new=arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) if __name__ == '__main__':", "log def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger) def main(): argument_parser =", "\"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": 
chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\":", "whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level, whole_path) agent = previous_agent for", "import parser import chrome_convert_agents import webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent import content_convert_agents import", "\"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\":", "previous_agent for agent_class in [_AGENT_DICT[i] for i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent =", "continue if use_base_class or not agent.skip(): agent.actions() return agent logger.error('Failed to match to", "lives') argument_parser.add_argument('-v', '--verbose', help='Log info', action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List all available agents',", "specify --jave-file and --directory at the same time') if arguments.agent == 'all': agents", "== 'all': agents = _TEST_AGENT_DICT.keys() else: agents = [arguments.agent] java_parser = CreateJavaParser() if", "chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": 
chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent,", "\"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\":", "and --directory at the same time') if arguments.agent == 'all': agents = _TEST_AGENT_DICT.keys()", "chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent})", "False if len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR): logger", "logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level,", "default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save as a new file') argument_parser.add_argument( '-a', '--agent',", "os.path.join(dirpath, filename) agent = ConvertFile( java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def", "agent, agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip())) return if arguments.java_file and arguments.directory:", "logging.DEBUG if 
arguments.list_agents: print('Available agents and description:\\n') for agent, agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\"", "whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue if use_base_class or not agent.skip():", "import instrumentation_convert_agents import test_base_convert_agent import content_convert_agents import logging import argparse import os import", "file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java file') argument_parser.add_argument('-d', '--directory', help='Directory where all java", "\"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\":", "(agent, agent_class.__doc__.strip())) return if arguments.java_file and arguments.directory: raise Exception( 'Can not specify --jave-file", "argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use another base class to convert', default=False,", "agent = ConvertFile( java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings,", "ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent = None for (dirpath, _, filenames)", "chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": 
content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent,", "agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level, whole_path) agent = previous_agent", "agent.actions() return agent logger.error('Failed to match to any agent') def SetLogger(logging_level, filepath): log", "= logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate =", "ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level, whole_path) agent =", "content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent,", "} _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False):", "else: agents = [arguments.agent] java_parser = CreateJavaParser() if arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new,", "logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue if use_base_class or not agent.skip(): agent.actions()", "python import parser import 
chrome_convert_agents import webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent import content_convert_agents", "\"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, }", "arguments.directory: raise Exception( 'Can not specify --jave-file and --directory at the same time')", "return agent logger.error('Failed to match to any agent') def SetLogger(logging_level, filepath): log =", "new file') argument_parser.add_argument( '-a', '--agent', help='Specify the agent for the current file', default='all')", "a new file') argument_parser.add_argument( '-a', '--agent', help='Specify the agent for the current file',", "agent_class in [_AGENT_DICT[i] for i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path,", "agent.skip(): agent.actions() return agent logger.error('Failed to match to any agent') def SetLogger(logging_level, filepath):", "log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger)", "the current file', default='all') arguments = argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if arguments.verbose: logging_level", "class to convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do not skip the specified file',", "not agent.skip(): agent.actions() return agent logger.error('Failed to match to any agent') def SetLogger(logging_level,", "agent_class.__doc__.strip())) return if arguments.java_file and 
arguments.directory: raise Exception( 'Can not specify --jave-file and", "'--save-as-new', default=False, action='store_true', help='Save as a new file') argument_parser.add_argument( '-a', '--agent', help='Specify the", "raise Exception( 'Can not specify --jave-file and --directory at the same time') if", "_TEST_AGENT_DICT.keys() else: agents = [arguments.agent] java_parser = CreateJavaParser() if arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file,", "import test_base_convert_agent import content_convert_agents import logging import argparse import os import sys _TEST_AGENT_DICT", "= False if len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR):", "chrome_convert_agents import webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent import content_convert_agents import logging import argparse", "\"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\":", "logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger) def main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u',", "in [_AGENT_DICT[i] for i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path, logger=logger,", "logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger) def main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use", "test_base_convert_agent 
import content_convert_agents import logging import argparse import os import sys _TEST_AGENT_DICT =", "not specify --jave-file and --directory at the same time') if arguments.agent == 'all':", "whole_path) agent = previous_agent for agent_class in [_AGENT_DICT[i] for i in agent_strings if", "if arguments.agent == 'all': agents = _TEST_AGENT_DICT.keys() else: agents = [arguments.agent] java_parser =", "filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False if", "\"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\":", "import os import sys _TEST_AGENT_DICT = { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent,", "\"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\":", "ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser, agents, save_as_new=arguments.save_as_new, logging_level=logging_level,", "agent for the current file', default='all') arguments = 
argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if", "% (agent, agent_class.__doc__.strip())) return if arguments.java_file and arguments.directory: raise Exception( 'Can not specify", "base class to convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do not skip the specified", "in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip())) return if arguments.java_file and arguments.directory: raise Exception(", "'Can not specify --jave-file and --directory at the same time') if arguments.agent ==", "= SetLogger(logging_level, whole_path) agent = previous_agent for agent_class in [_AGENT_DICT[i] for i in", "current file', default='all') arguments = argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if arguments.verbose: logging_level =", "in os.walk(directory): for filename in filenames: whole_path = os.path.join(dirpath, filename) agent = ConvertFile(", "import argparse import os import sys _TEST_AGENT_DICT = { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent,", "help='Save as a new file') argument_parser.add_argument( '-a', '--agent', help='Specify the agent for the", "= logging.INFO if arguments.verbose: logging_level = logging.DEBUG if arguments.list_agents: print('Available agents and description:\\n')", "help='Java file') argument_parser.add_argument('-d', '--directory', help='Directory where all java file lives') argument_parser.add_argument('-v', '--verbose', help='Log", "default='all') arguments = argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if arguments.verbose: logging_level = logging.DEBUG if", "instrumentation_convert_agents import test_base_convert_agent import content_convert_agents import logging import argparse import os import sys", "action='store_true') argument_parser.add_argument( '--no-skip', help='Do not skip 
the specified file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file',", "content_convert_agents import logging import argparse import os import sys _TEST_AGENT_DICT = { \"chrome-base-test-case\":", "chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent,", "log.setLevel(logging_level) log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger) def", "java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent = None for (dirpath, _, filenames) in", "logger = SetLogger(logging_level, whole_path) agent = previous_agent for agent_class in [_AGENT_DICT[i] for i", "fh.setFormatter(f) log.propagate = False if len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return log", "= None for (dirpath, _, filenames) in os.walk(directory): for filename in filenames: whole_path", "agent = agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue if use_base_class", "arguments = argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if arguments.verbose: logging_level = logging.DEBUG if arguments.list_agents:", "filename = filepath.split('/')[-1] f = logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler()", "= _TEST_AGENT_DICT.keys() 
else: agents = [arguments.agent] java_parser = CreateJavaParser() if arguments.java_file: ConvertFile(java_parser, agents,", "for filename in filenames: whole_path = os.path.join(dirpath, filename) agent = ConvertFile( java_parser, agent_strings,", "logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level, whole_path) agent = previous_agent for agent_class in [_AGENT_DICT[i]", "\"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\":", "def SetLogger(logging_level, filepath): log = logging.getLogger() filename = filepath.split('/')[-1] f = logging.Formatter( filename", "main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use another base class to convert',", "(dirpath, _, filenames) in os.walk(directory): for filename in filenames: whole_path = os.path.join(dirpath, filename)", "content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent,", "agents = [arguments.agent] java_parser = CreateJavaParser() if arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level,", "0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) 
log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return", "java_parser = CreateJavaParser() if arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory(", "None for (dirpath, _, filenames) in os.walk(directory): for filename in filenames: whole_path =", "use_base_class=use_base_class) if agent._failed_to_parse: continue if use_base_class or not agent.skip(): agent.actions() return agent logger.error('Failed", "if len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR): logger =", "to match to any agent') def SetLogger(logging_level, filepath): log = logging.getLogger() filename =", "webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent import content_convert_agents import logging import argparse import os", "help='Specify the agent for the current file', default='all') arguments = argument_parser.parse_args(sys.argv[1:]) logging_level =", "= [arguments.agent] java_parser = CreateJavaParser() if arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class)", "= ConvertFile( java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path,", "_TEST_AGENT_DICT = { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, 
\"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent,", "argument_parser.add_argument( '--no-skip', help='Do not skip the specified file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java", "argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if arguments.verbose: logging_level = logging.DEBUG if arguments.list_agents: print('Available agents", "match to any agent') def SetLogger(logging_level, filepath): log = logging.getLogger() filename = filepath.split('/')[-1]", "logging import argparse import os import sys _TEST_AGENT_DICT = { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\":", "logging_level = logging.INFO if arguments.verbose: logging_level = logging.DEBUG if arguments.list_agents: print('Available agents and", "= agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue if use_base_class or", "fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False if len(log.handlers) > 0: log.removeHandler(log.handlers[0])", "skip the specified file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java file') argument_parser.add_argument('-d', '--directory', help='Directory", "os.walk(directory): for filename in filenames: whole_path = os.path.join(dirpath, filename) agent = ConvertFile( java_parser,", "to any agent') def SetLogger(logging_level, filepath): log = logging.getLogger() filename = filepath.split('/')[-1] f", "print('Available agents and description:\\n') for agent, agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip()))", "agents and description:\\n') for agent, agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip())) return", "help='Log info', 
action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List all available agents', action='store_true', default=False) argument_parser.add_argument('-n',", "action='store_true', help='Save as a new file') argument_parser.add_argument( '-a', '--agent', help='Specify the agent for", "agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue if use_base_class or not", "import chrome_convert_agents import webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent import content_convert_agents import logging import", "filename) agent = ConvertFile( java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser,", "--jave-file and --directory at the same time') if arguments.agent == 'all': agents =", "argument_parser.add_argument( '-u', '--use-base-class', help='Use another base class to convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip',", "for the current file', default='all') arguments = argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if arguments.verbose:", "agent = previous_agent for agent_class in [_AGENT_DICT[i] for i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]:", "{ \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent,", "specified file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java file') 
argument_parser.add_argument('-d', '--directory', help='Directory where all", "<reponame>yoland68/junit-auto-migrate #!/usr/bin/env python import parser import chrome_convert_agents import webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent", "description:\\n') for agent, agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip())) return if arguments.java_file", "logging_level=logging.WARNING, use_base_class=False): agent = None for (dirpath, _, filenames) in os.walk(directory): for filename", "parser.Parser(logger) def main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use another base class", "arguments.verbose: logging_level = logging.DEBUG if arguments.list_agents: print('Available agents and description:\\n') for agent, agent_class", "content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent,", "instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent,", "def ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level, whole_path) agent", "for agent, agent_class in 
_AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip())) return if arguments.java_file and", "java file lives') argument_parser.add_argument('-v', '--verbose', help='Log info', action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List all", "where all java file lives') argument_parser.add_argument('-v', '--verbose', help='Log info', action='store_true') argument_parser.add_argument( '-l', '--list-agents',", "agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING,", "filepath.split('/')[-1] f = logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f)", "_TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent = None", "java_parser, agent_strings, whole_path, save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None,", "return parser.Parser(logger) def main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use another base", "filepath): log = logging.getLogger() filename = filepath.split('/')[-1] f = logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s:", "Exception( 'Can not specify --jave-file and --directory at the same time') if arguments.agent", "def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent = None for (dirpath, _,", "logging.Formatter( filename + 
':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False", "arguments.java_file and arguments.directory: raise Exception( 'Can not specify --jave-file and --directory at the", "previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger =", "def main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use another base class to", "not skip the specified file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java file') argument_parser.add_argument('-d', '--directory',", "agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent = None for (dirpath, _, filenames) in os.walk(directory):", "default=False) argument_parser.add_argument('-f', '--java-file', help='Java file') argument_parser.add_argument('-d', '--directory', help='Directory where all java file lives')", "if use_base_class or not agent.skip(): agent.actions() return agent logger.error('Failed to match to any", "or not agent.skip(): agent.actions() return agent logger.error('Failed to match to any agent') def", "argument_parser.add_argument( '-a', '--agent', help='Specify the agent for the current file', default='all') arguments =", "chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory,", "chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": 
instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent,", "and description:\\n') for agent, agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip())) return if", "agent = None for (dirpath, _, filenames) in os.walk(directory): for filename in filenames:", "the same time') if arguments.agent == 'all': agents = _TEST_AGENT_DICT.keys() else: agents =", "save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level, whole_path) agent = previous_agent for agent_class", "print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip())) return if arguments.java_file and arguments.directory: raise Exception( 'Can not", "if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue", "\"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def", "use_base_class=False): agent = None for (dirpath, _, filenames) in os.walk(directory): for filename in", "argument_parser.add_argument( '-l', '--list-agents', help='List all available agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true',", "filenames) in os.walk(directory): for filename in filenames: whole_path = os.path.join(dirpath, filename) agent =", "arguments.list_agents: print('Available agents and description:\\n') for 
agent, agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent,", "arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser, agents, save_as_new=arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) if", "chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent,", "logger.setLevel(logging_level) return parser.Parser(logger) def main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use another", "\"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\":", "the agent for the current file', default='all') arguments = argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO", "_AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent = None for", "logging.getLogger() filename = filepath.split('/')[-1] f = logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh =", "log.propagate = False if len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return log def", "= 
argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use another base class to convert', default=False, action='store_true')", "agent._failed_to_parse: continue if use_base_class or not agent.skip(): agent.actions() return agent logger.error('Failed to match", "use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level, whole_path)", "\"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser,", "f = logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate", "file lives') argument_parser.add_argument('-v', '--verbose', help='Log info', action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List all available", "= logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False if len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level)", "for i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new,", "use_base_class=False): logger = SetLogger(logging_level, whole_path) agent = previous_agent for agent_class in [_AGENT_DICT[i] for", "to convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do not skip the specified file', action='store_true',", "same time') if arguments.agent == 'all': agents = _TEST_AGENT_DICT.keys() else: agents = [arguments.agent]", "agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % 
(agent, agent_class.__doc__.strip())) return if arguments.java_file and arguments.directory: raise", "\"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False,", "chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent,", "chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent,", "= CreateJavaParser() if arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory,", "argparse.ArgumentParser() argument_parser.add_argument( '-u', '--use-base-class', help='Use another base class to convert', default=False, action='store_true') argument_parser.add_argument(", "at the same time') if arguments.agent == 'all': agents = _TEST_AGENT_DICT.keys() else: agents", "'all': agents = _TEST_AGENT_DICT.keys() else: agents = [arguments.agent] java_parser = CreateJavaParser() if arguments.java_file:", "if arguments.java_file: ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, 
logging_level=logging_level, use_base_class=arguments.use_base_class) else: ConvertDirectory( arguments.directory, java_parser, agents,", "'--use-base-class', help='Use another base class to convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do not", "#\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\":", "'--directory', help='Directory where all java file lives') argument_parser.add_argument('-v', '--verbose', help='Log info', action='store_true') argument_parser.add_argument(", "as a new file') argument_parser.add_argument( '-a', '--agent', help='Specify the agent for the current", "convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do not skip the specified file', action='store_true', default=False)", "in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if", "_AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse: continue if", "content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, 
\"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent,", "if arguments.verbose: logging_level = logging.DEBUG if arguments.list_agents: print('Available agents and description:\\n') for agent,", "the specified file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java file') argument_parser.add_argument('-d', '--directory', help='Directory where", "\"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\":", "SetLogger(logging_level, filepath): log = logging.getLogger() filename = filepath.split('/')[-1] f = logging.Formatter( filename +", "test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent = None for (dirpath,", "else: ConvertDirectory( arguments.directory, java_parser, agents, save_as_new=arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) if __name__ == '__main__': main()", "content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent,", "[arguments.agent] java_parser = CreateJavaParser() if arguments.java_file: 
ConvertFile(java_parser, agents, arguments.java_file, arguments.save_as_new, logging_level=logging_level, use_base_class=arguments.use_base_class) else:", "= argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if arguments.verbose: logging_level = logging.DEBUG if arguments.list_agents: print('Available", "file') argument_parser.add_argument( '-a', '--agent', help='Specify the agent for the current file', default='all') arguments", "all java file lives') argument_parser.add_argument('-v', '--verbose', help='Log info', action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List", "agent logger.error('Failed to match to any agent') def SetLogger(logging_level, filepath): log = logging.getLogger()", "fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False if len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return", "action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java file') argument_parser.add_argument('-d', '--directory', help='Directory where all java file", "info', action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List all available agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new',", "argument_parser.add_argument('-f', '--java-file', help='Java file') argument_parser.add_argument('-d', '--directory', help='Directory where all java file lives') argument_parser.add_argument('-v',", "and arguments.directory: raise Exception( 'Can not specify --jave-file and --directory at the same", "help='Do not skip the specified file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java file') argument_parser.add_argument('-d',", "\"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": 
chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\":", "i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class)", "\"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\":", "'-l', '--list-agents', help='List all available agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save", "= filepath.split('/')[-1] f = logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level)", "= { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\":", "chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": 
chrome_convert_agents.NotificationTestAgent,", "--directory at the same time') if arguments.agent == 'all': agents = _TEST_AGENT_DICT.keys() else:", "logging.INFO if arguments.verbose: logging_level = logging.DEBUG if arguments.list_agents: print('Available agents and description:\\n') for", "return log def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger) def main(): argument_parser", "if arguments.list_agents: print('Available agents and description:\\n') for agent, agent_class in _AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" %", "file', default='all') arguments = argument_parser.parse_args(sys.argv[1:]) logging_level = logging.INFO if arguments.verbose: logging_level = logging.DEBUG", "argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save as a new file') argument_parser.add_argument( '-a', '--agent', help='Specify", "arguments.agent == 'all': agents = _TEST_AGENT_DICT.keys() else: agents = [arguments.agent] java_parser = CreateJavaParser()", "available agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save as a new file')", "_AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent", "chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings,", "= logging.getLogger() filename = filepath.split('/')[-1] f = logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh", "'--no-skip', help='Do not skip 
the specified file', action='store_true', default=False) argument_parser.add_argument('-f', '--java-file', help='Java file')", "'--agent', help='Specify the agent for the current file', default='all') arguments = argument_parser.parse_args(sys.argv[1:]) logging_level", "action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save as a new file') argument_parser.add_argument( '-a',", "'-u', '--use-base-class', help='Use another base class to convert', default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do", "return if arguments.java_file and arguments.directory: raise Exception( 'Can not specify --jave-file and --directory", "previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger = SetLogger(logging_level, whole_path) agent = previous_agent for agent_class in", "default=False, action='store_true') argument_parser.add_argument( '--no-skip', help='Do not skip the specified file', action='store_true', default=False) argument_parser.add_argument('-f',", "use_base_class or not agent.skip(): agent.actions() return agent logger.error('Failed to match to any agent')", "':%(levelname)s:%(module)s:%(lineno)s: %(message)s') fh = logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False if len(log.handlers) >", "\"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\":", "help='Directory where all java file lives') argument_parser.add_argument('-v', '--verbose', help='Log info', action='store_true') argument_parser.add_argument( '-l',", "log = 
logging.getLogger() filename = filepath.split('/')[-1] f = logging.Formatter( filename + ':%(levelname)s:%(module)s:%(lineno)s: %(message)s')", "parser import chrome_convert_agents import webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent import content_convert_agents import logging", "agents = _TEST_AGENT_DICT.keys() else: agents = [arguments.agent] java_parser = CreateJavaParser() if arguments.java_file: ConvertFile(java_parser,", "\"multiactivity-test\": chrome_convert_agents.MultiActivityTestAgent, \"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\":", "> 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level)", "all available agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save as a new", "chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest,", "[_AGENT_DICT[i] for i in agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path, logger=logger, agent=agent,", "\"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": 
chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT =", "save_as_new, previous_agent=agent, logging_level=logging_level, use_base_class=use_base_class) def ConvertFile(java_parser, agent_strings, whole_path, save_as_new, previous_agent=None, logging_level=logging.WARNING, use_base_class=False): logger", "log.addHandler(fh) return log def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger) def main():", "filename in filenames: whole_path = os.path.join(dirpath, filename) agent = ConvertFile( java_parser, agent_strings, whole_path,", "chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy()", "save_as_new=False, logging_level=logging.WARNING, use_base_class=False): agent = None for (dirpath, _, filenames) in os.walk(directory): for", "'--verbose', help='Log info', action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List all available agents', action='store_true', default=False)", "def CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return parser.Parser(logger) def main(): argument_parser = argparse.ArgumentParser()", "in filenames: whole_path = os.path.join(dirpath, filename) agent = ConvertFile( java_parser, agent_strings, whole_path, save_as_new,", "chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": 
chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\": content_convert_agents.ContentShellTestAgent,", "webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\": chrome_convert_agents.PartnerIntegrationTestAgent, \"crash-test\": chrome_convert_agents.CrashTestAgent, } _AGENT_DICT", "time') if arguments.agent == 'all': agents = _TEST_AGENT_DICT.keys() else: agents = [arguments.agent] java_parser", "_AGENT_DICT.iteritems(): print(\"%25s:\\t%s\" % (agent, agent_class.__doc__.strip())) return if arguments.java_file and arguments.directory: raise Exception( 'Can", "agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save as a new file') argument_parser.add_argument(", "import sys _TEST_AGENT_DICT = { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent,", "agent_strings if _AGENT_DICT[i].filename_match(whole_path)]: agent = agent_class(java_parser, whole_path, logger=logger, agent=agent, save_as_new=save_as_new, use_base_class=use_base_class) if agent._failed_to_parse:", "chrome_convert_agents.CrashTestAgent, } _AGENT_DICT = _TEST_AGENT_DICT.copy() _AGENT_DICT.update({\"base-class\": test_base_convert_agent.BaseCaseAgent}) def ConvertDirectory(directory, java_parser, agent_strings, save_as_new=False, 
logging_level=logging.WARNING,", "argument_parser.add_argument('-d', '--directory', help='Directory where all java file lives') argument_parser.add_argument('-v', '--verbose', help='Log info', action='store_true')", "\"vr-test\": chrome_convert_agents.ChromeVrTestAgent, \"payment-test\": chrome_convert_agents.PaymentRequestAgent, \"mojo-test\": chrome_convert_agents.MojoTestAgent, \"cast-test\": chrome_convert_agents.CastTestAgent, \"provider-test\": chrome_convert_agents.ProviderTestAgent, \"customtabs-test\": chrome_convert_agents.CustomTabActivityTestAgent, \"notification-test\":", "\"content-shell-test\": content_convert_agents.ContentShellTestAgent, \"dialog-overlay-impl-test\": content_convert_agents.DialogOverlayImplTestAgent, \"webview-test\": webview_convert_agents.WebViewTestAgent, \"cronet-test\": chrome_convert_agents.CronetTestAgent, \"partner-unit-test\": chrome_convert_agents.PartnerUnitTestAgent, \"sync-test\": chrome_convert_agents.SyncTestAgent, \"partner-integration-test\":", "file') argument_parser.add_argument('-d', '--directory', help='Directory where all java file lives') argument_parser.add_argument('-v', '--verbose', help='Log info',", "\"notification-test\": chrome_convert_agents.NotificationTestAgent, #\"download-test\": chrome_convert_agents.DownloadTestAgent, \"bottom-sheet-test\": chrome_convert_agents.BottomSheetTestAgent, \"connectivity-checker-test\": content_convert_agents.ConnectivityCheckerTestAgent, \"tab-model-selector-observer-test\": content_convert_agents.SelectorObserverTest, \"native-library-test\": content_convert_agents.NativeLibraryTestAgent, \"content-shell-test\":", "action='store_true') argument_parser.add_argument( '-l', '--list-agents', help='List all available agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False,", "CreateJavaParser(logging_level=logging.ERROR): logger = logging.getLogger('parser_logger') logger.setLevel(logging_level) return 
parser.Parser(logger) def main(): argument_parser = argparse.ArgumentParser() argument_parser.add_argument(", "sys _TEST_AGENT_DICT = { \"chrome-base-test-case\": chrome_convert_agents.ChromeActivityBaseCaseAgent, \"chrome-permission-test\": chrome_convert_agents.PermissionTestAgent, \"chrome-tabbed-test\": chrome_convert_agents.ChromeTabbedTestAgent, \"instrumentation\": instrumentation_convert_agents.InstrumentationTestCaseAgent, \"multiactivity-test\":", "#!/usr/bin/env python import parser import chrome_convert_agents import webview_convert_agents import instrumentation_convert_agents import test_base_convert_agent import", "help='List all available agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save as a", "'--list-agents', help='List all available agents', action='store_true', default=False) argument_parser.add_argument('-n', '--save-as-new', default=False, action='store_true', help='Save as", "logging.StreamHandler() fh.setLevel(logging_level) fh.setFormatter(f) log.propagate = False if len(log.handlers) > 0: log.removeHandler(log.handlers[0]) log.setLevel(logging_level) log.addHandler(fh)" ]
[ "event index specified is associated it. If it is, call that function with", "Get plugin commands for plugin in plugins_list: if (plugin[0] in ('.', '_') or", "len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def broadcast_event(bot, event, *args, **kwargs): \"\"\"", "(renamed to plugin for the user). In addition, this also sets the commands", "import ErrorTypes, BotException EXCEPTION = 'Plugins' def add_plugins(bot): \"\"\" Gets a list of", "def add_plugins(bot): \"\"\" Gets a list of all of the plugins and stores", "this also sets the commands given by each plugin. \"\"\" directory = '{}/plugins'.format(bot.path)", "plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] = [base, plugin_commands] # Get plugin", "not plugin.endswith('.py')): continue try: spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec)", "EXCEPTION, \"Failed to import external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin))", "importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands, module) except Exception as e: traceback.print_exc()", "Loops through all of the plugins and looks to see if the event", "sets the commands given by each plugin. 
\"\"\" directory = '{}/plugins'.format(bot.path) data_directory =", "through all of the plugins and looks to see if the event index", "e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded", "= {} # Add base plugin from jshbot import base plugin_commands = base.get_commands()", "jshbot import commands from jshbot.exceptions import ErrorTypes, BotException EXCEPTION = 'Plugins' def add_plugins(bot):", "BotException EXCEPTION = 'Plugins' def add_plugins(bot): \"\"\" Gets a list of all of", "args. \"\"\" for plugin in bot.plugins.values(): function = getattr(plugin[0], event, None) if function:", "import traceback from jshbot import commands from jshbot.exceptions import ErrorTypes, BotException EXCEPTION =", "event, *args, **kwargs): \"\"\" Loops through all of the plugins and looks to", "def broadcast_event(bot, event, *args, **kwargs): \"\"\" Loops through all of the plugins and", "# Debug import traceback from jshbot import commands from jshbot.exceptions import ErrorTypes, BotException", "jshbot import base plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] = [base, plugin_commands]", "'Plugins' def add_plugins(bot): \"\"\" Gets a list of all of the plugins and", "all of the plugins and stores them as a key/value pair of the", "a key/value pair of the plugin name and the module itself (renamed to", "plugin {}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins =", "Debug import traceback from jshbot import commands from jshbot.exceptions import ErrorTypes, BotException EXCEPTION", "raise BotException( EXCEPTION, \"Plugins directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins = {} # Add", "the plugins and looks to see if the event index 
specified is associated", "and looks to see if the event index specified is associated it. If", "plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands, module) except Exception", "\"\"\" Gets a list of all of the plugins and stores them as", "to import external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] =", "plugin import path.\") sys.path.append(data_directory) try: plugins_list = os.listdir(directory) except FileNotFoundError: raise BotException( EXCEPTION,", "**kwargs): \"\"\" Loops through all of the plugins and looks to see if", "and the module itself (renamed to plugin for the user). In addition, this", "found\", error_type=ErrorTypes.STARTUP) valid_plugins = {} # Add base plugin from jshbot import base", "or plugin == 'base' or not plugin.endswith('.py')): continue try: spec = importlib.util.spec_from_file_location( plugin,", "traceback from jshbot import commands from jshbot.exceptions import ErrorTypes, BotException EXCEPTION = 'Plugins'", "it. If it is, call that function with args. \"\"\" for plugin in", "None) if function: try: asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError as e: logging.error(traceback.format_exc()) logging.error(\"Bypassing", "import logging import importlib.util import os.path import sys # Debug import traceback from", "{}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins", "see if the event index specified is associated it. 
If it is, call", "base) valid_plugins['base'] = [base, plugin_commands] # Get plugin commands for plugin in plugins_list:", "asyncio import logging import importlib.util import os.path import sys # Debug import traceback", "Exception as e: traceback.print_exc() raise BotException( EXCEPTION, \"Failed to import external plugin\", plugin,", "is, call that function with args. \"\"\" for plugin in bot.plugins.values(): function =", "module itself (renamed to plugin for the user). In addition, this also sets", "valid_plugins def broadcast_event(bot, event, *args, **kwargs): \"\"\" Loops through all of the plugins", "\"Failed to import external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin]", "key/value pair of the plugin name and the module itself (renamed to plugin", "valid_plugins[plugin] = [module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def", "asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError as e: logging.error(traceback.format_exc()) logging.error(\"Bypassing event error: \" +", "import commands from jshbot.exceptions import ErrorTypes, BotException EXCEPTION = 'Plugins' def add_plugins(bot): \"\"\"", "base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] = [base, plugin_commands] # Get plugin commands for", "Gets a list of all of the plugins and stores them as a", "{} # Add base plugin from jshbot import base plugin_commands = base.get_commands() commands.add_commands(bot,", "or not plugin.endswith('.py')): continue try: spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module =", "addition, this also sets the commands given by each plugin. 
\"\"\" directory =", "(plugin[0] in ('.', '_') or plugin == 'base' or not plugin.endswith('.py')): continue try:", "of the plugins and looks to see if the event index specified is", "for plugin in bot.plugins.values(): function = getattr(plugin[0], event, None) if function: try: asyncio.ensure_future(function(bot,", "plugins and looks to see if the event index specified is associated it.", "with args. \"\"\" for plugin in bot.plugins.values(): function = getattr(plugin[0], event, None) if", "\"\"\" Loops through all of the plugins and looks to see if the", "e: traceback.print_exc() raise BotException( EXCEPTION, \"Failed to import external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP)", "else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins)))", "\"\"\" for plugin in bot.plugins.values(): function = getattr(plugin[0], event, None) if function: try:", "[base, plugin_commands] # Get plugin commands for plugin in plugins_list: if (plugin[0] in", "importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands,", "all of the plugins and looks to see if the event index specified", "and stores them as a key/value pair of the plugin name and the", "plugin name and the module itself (renamed to plugin for the user). In", "looks to see if the event index specified is associated it. 
If it", "import importlib.util import os.path import sys # Debug import traceback from jshbot import", "the plugins and stores them as a key/value pair of the plugin name", "spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands, module) except Exception as e: traceback.print_exc() raise", "add_plugins(bot): \"\"\" Gets a list of all of the plugins and stores them", "also sets the commands given by each plugin. \"\"\" directory = '{}/plugins'.format(bot.path) data_directory", "the event index specified is associated it. If it is, call that function", "'{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin import path.\") sys.path.append(data_directory)", "commands.add_commands(bot, plugin_commands, module) except Exception as e: traceback.print_exc() raise BotException( EXCEPTION, \"Failed to", "= importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot,", "logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def broadcast_event(bot, event, *args, **kwargs): \"\"\" Loops", "sys # Debug import traceback from jshbot import commands from jshbot.exceptions import ErrorTypes,", "valid_plugins['base'] = [base, plugin_commands] # Get plugin commands for plugin in plugins_list: if", "EXCEPTION, \"Plugins directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins = {} # Add base plugin", "plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands, module) except Exception as e: traceback.print_exc() raise BotException(", "ErrorTypes, BotException EXCEPTION = 'Plugins' def add_plugins(bot): \"\"\" Gets a list of all", "from jshbot.exceptions import ErrorTypes, 
BotException EXCEPTION = 'Plugins' def add_plugins(bot): \"\"\" Gets a", "os.listdir(directory) except FileNotFoundError: raise BotException( EXCEPTION, \"Plugins directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins =", "given by each plugin. \"\"\" directory = '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory):", "jshbot.exceptions import ErrorTypes, BotException EXCEPTION = 'Plugins' def add_plugins(bot): \"\"\" Gets a list", "EXCEPTION = 'Plugins' def add_plugins(bot): \"\"\" Gets a list of all of the", "BotException( EXCEPTION, \"Failed to import external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin", "bot.plugins.values(): function = getattr(plugin[0], event, None) if function: try: asyncio.ensure_future(function(bot, *args, **kwargs)) except", "is associated it. If it is, call that function with args. \"\"\" for", "the module itself (renamed to plugin for the user). In addition, this also", "*args, **kwargs)) except TypeError as e: logging.error(traceback.format_exc()) logging.error(\"Bypassing event error: \" + e)", "for the user). 
In addition, this also sets the commands given by each", "list of all of the plugins and stores them as a key/value pair", "plugin, '{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands, module)", "if function: try: asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError as e: logging.error(traceback.format_exc()) logging.error(\"Bypassing event", "logging import importlib.util import os.path import sys # Debug import traceback from jshbot", "plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands] if len(valid_plugins):", "plugins_list = os.listdir(directory) except FileNotFoundError: raise BotException( EXCEPTION, \"Plugins directory not found\", error_type=ErrorTypes.STARTUP)", "itself (renamed to plugin for the user). In addition, this also sets the", "except FileNotFoundError: raise BotException( EXCEPTION, \"Plugins directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins = {}", "FileNotFoundError: raise BotException( EXCEPTION, \"Plugins directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins = {} #", "[module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def broadcast_event(bot, event,", "{} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def broadcast_event(bot, event, *args, **kwargs): \"\"\" Loops through", "error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {}", "module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands, module) except 
Exception as", "function: try: asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError as e: logging.error(traceback.format_exc()) logging.error(\"Bypassing event error:", "for plugin in plugins_list: if (plugin[0] in ('.', '_') or plugin == 'base'", "plugin for the user). In addition, this also sets the commands given by", "commands from jshbot.exceptions import ErrorTypes, BotException EXCEPTION = 'Plugins' def add_plugins(bot): \"\"\" Gets", "name and the module itself (renamed to plugin for the user). In addition,", "as e: traceback.print_exc() raise BotException( EXCEPTION, \"Failed to import external plugin\", plugin, e=e,", "if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin import path.\") sys.path.append(data_directory) try: plugins_list = os.listdir(directory)", "if (plugin[0] in ('.', '_') or plugin == 'base' or not plugin.endswith('.py')): continue", "the plugin name and the module itself (renamed to plugin for the user).", "event, None) if function: try: asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError as e: logging.error(traceback.format_exc())", "import path.\") sys.path.append(data_directory) try: plugins_list = os.listdir(directory) except FileNotFoundError: raise BotException( EXCEPTION, \"Plugins", "in ('.', '_') or plugin == 'base' or not plugin.endswith('.py')): continue try: spec", "plugin in plugins_list: if (plugin[0] in ('.', '_') or plugin == 'base' or", "= [base, plugin_commands] # Get plugin commands for plugin in plugins_list: if (plugin[0]", "import external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] = [module,", "In addition, this also sets the commands given by each plugin. \"\"\" directory", "of the plugins and stores them as a key/value pair of the plugin", "commands given by each plugin. 
\"\"\" directory = '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if", "= importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands, module) except Exception as e:", "function = getattr(plugin[0], event, None) if function: try: asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError", "'base' or not plugin.endswith('.py')): continue try: spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module", "\"\"\" directory = '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin", "data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin import path.\") sys.path.append(data_directory) try:", "base plugin from jshbot import base plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base']", "plugin from jshbot import base plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] =", "plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands] if", "to see if the event index specified is associated it. If it is,", "to plugin for the user). 
In addition, this also sets the commands given", "path.\") sys.path.append(data_directory) try: plugins_list = os.listdir(directory) except FileNotFoundError: raise BotException( EXCEPTION, \"Plugins directory", "base plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] = [base, plugin_commands] # Get", "raise BotException( EXCEPTION, \"Failed to import external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding", "getattr(plugin[0], event, None) if function: try: asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError as e:", "*args, **kwargs): \"\"\" Loops through all of the plugins and looks to see", "os.path import sys # Debug import traceback from jshbot import commands from jshbot.exceptions", "commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] = [base, plugin_commands] # Get plugin commands for plugin", "BotException( EXCEPTION, \"Plugins directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins = {} # Add base", "of the plugin name and the module itself (renamed to plugin for the", "plugin_commands, module) except Exception as e: traceback.print_exc() raise BotException( EXCEPTION, \"Failed to import", "function with args. 
\"\"\" for plugin in bot.plugins.values(): function = getattr(plugin[0], event, None)", "plugin in bot.plugins.values(): function = getattr(plugin[0], event, None) if function: try: asyncio.ensure_future(function(bot, *args,", "Add base plugin from jshbot import base plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands, base)", "= base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] = [base, plugin_commands] # Get plugin commands", "in plugins_list: if (plugin[0] in ('.', '_') or plugin == 'base' or not", "continue try: spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands", "plugins and stores them as a key/value pair of the plugin name and", "module.get_commands() commands.add_commands(bot, plugin_commands, module) except Exception as e: traceback.print_exc() raise BotException( EXCEPTION, \"Failed", "pair of the plugin name and the module itself (renamed to plugin for", "directory = '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin import", "except Exception as e: traceback.print_exc() raise BotException( EXCEPTION, \"Failed to import external plugin\",", "traceback.print_exc() raise BotException( EXCEPTION, \"Failed to import external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else:", "== 'base' or not plugin.endswith('.py')): continue try: spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin))", "plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def broadcast_event(bot, event, *args,", "specified is associated it. If it is, call that function with args. 
\"\"\"", "= getattr(plugin[0], event, None) if function: try: asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError as", "importlib.util import os.path import sys # Debug import traceback from jshbot import commands", "from jshbot import base plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] = [base,", "that function with args. \"\"\" for plugin in bot.plugins.values(): function = getattr(plugin[0], event,", "= module.get_commands() commands.add_commands(bot, plugin_commands, module) except Exception as e: traceback.print_exc() raise BotException( EXCEPTION,", "external plugin\", plugin, e=e, error_type=ErrorTypes.STARTUP) else: logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands]", "commands for plugin in plugins_list: if (plugin[0] in ('.', '_') or plugin ==", "import base plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands, base) valid_plugins['base'] = [base, plugin_commands] #", "'{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands() commands.add_commands(bot, plugin_commands, module) except", "= [module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def broadcast_event(bot,", "try: plugins_list = os.listdir(directory) except FileNotFoundError: raise BotException( EXCEPTION, \"Plugins directory not found\",", "them as a key/value pair of the plugin name and the module itself", "# Get plugin commands for plugin in plugins_list: if (plugin[0] in ('.', '_')", "each plugin. \"\"\" directory = '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data", "user). In addition, this also sets the commands given by each plugin. 
\"\"\"", "valid_plugins = {} # Add base plugin from jshbot import base plugin_commands =", "plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def broadcast_event(bot, event, *args, **kwargs): \"\"\" Loops through all", "plugin_commands, base) valid_plugins['base'] = [base, plugin_commands] # Get plugin commands for plugin in", "of all of the plugins and stores them as a key/value pair of", "if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins = valid_plugins def broadcast_event(bot, event, *args, **kwargs):", "if the event index specified is associated it. If it is, call that", "import os.path import sys # Debug import traceback from jshbot import commands from", "= os.listdir(directory) except FileNotFoundError: raise BotException( EXCEPTION, \"Plugins directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins", "'{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin import path.\") sys.path.append(data_directory) try: plugins_list =", "= valid_plugins def broadcast_event(bot, event, *args, **kwargs): \"\"\" Loops through all of the", "the commands given by each plugin. \"\"\" directory = '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path)", "try: asyncio.ensure_future(function(bot, *args, **kwargs)) except TypeError as e: logging.error(traceback.format_exc()) logging.error(\"Bypassing event error: \"", "If it is, call that function with args. 
\"\"\" for plugin in bot.plugins.values():", "plugin_commands] # Get plugin commands for plugin in plugins_list: if (plugin[0] in ('.',", "plugin == 'base' or not plugin.endswith('.py')): continue try: spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory,", "stores them as a key/value pair of the plugin name and the module", "a list of all of the plugins and stores them as a key/value", "not found\", error_type=ErrorTypes.STARTUP) valid_plugins = {} # Add base plugin from jshbot import", "logging.debug(\"Setting plugin_data as plugin import path.\") sys.path.append(data_directory) try: plugins_list = os.listdir(directory) except FileNotFoundError:", "plugins_list: if (plugin[0] in ('.', '_') or plugin == 'base' or not plugin.endswith('.py')):", "logging.debug(\"Adding plugin {}\".format(plugin)) valid_plugins[plugin] = [module, plugin_commands] if len(valid_plugins): logging.debug(\"Loaded {} plugin(s)\".format(len(valid_plugins))) bot.plugins", "import asyncio import logging import importlib.util import os.path import sys # Debug import", "sys.path.append(data_directory) try: plugins_list = os.listdir(directory) except FileNotFoundError: raise BotException( EXCEPTION, \"Plugins directory not", "as plugin import path.\") sys.path.append(data_directory) try: plugins_list = os.listdir(directory) except FileNotFoundError: raise BotException(", "('.', '_') or plugin == 'base' or not plugin.endswith('.py')): continue try: spec =", "the user). 
In addition, this also sets the commands given by each plugin.", "spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands = module.get_commands()", "plugin.endswith('.py')): continue try: spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module)", "= '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin import path.\") sys.path.append(data_directory) try: plugins_list", "plugin_data as plugin import path.\") sys.path.append(data_directory) try: plugins_list = os.listdir(directory) except FileNotFoundError: raise", "associated it. If it is, call that function with args. \"\"\" for plugin", "in bot.plugins.values(): function = getattr(plugin[0], event, None) if function: try: asyncio.ensure_future(function(bot, *args, **kwargs))", "os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin import path.\") sys.path.append(data_directory) try: plugins_list = os.listdir(directory) except", "plugin. 
\"\"\" directory = '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as", "\"Plugins directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins = {} # Add base plugin from", "from jshbot import commands from jshbot.exceptions import ErrorTypes, BotException EXCEPTION = 'Plugins' def", "as a key/value pair of the plugin name and the module itself (renamed", "directory not found\", error_type=ErrorTypes.STARTUP) valid_plugins = {} # Add base plugin from jshbot", "try: spec = importlib.util.spec_from_file_location( plugin, '{}/{}'.format(directory, plugin)) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) plugin_commands =", "error_type=ErrorTypes.STARTUP) valid_plugins = {} # Add base plugin from jshbot import base plugin_commands", "import sys # Debug import traceback from jshbot import commands from jshbot.exceptions import", "= 'Plugins' def add_plugins(bot): \"\"\" Gets a list of all of the plugins", "= '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting plugin_data as plugin import path.\")", "bot.plugins = valid_plugins def broadcast_event(bot, event, *args, **kwargs): \"\"\" Loops through all of", "broadcast_event(bot, event, *args, **kwargs): \"\"\" Loops through all of the plugins and looks", "index specified is associated it. If it is, call that function with args.", "it is, call that function with args. \"\"\" for plugin in bot.plugins.values(): function", "by each plugin. 
\"\"\" directory = '{}/plugins'.format(bot.path) data_directory = '{}/plugins/plugin_data'.format(bot.path) if os.path.isdir(data_directory): logging.debug(\"Setting", "# Add base plugin from jshbot import base plugin_commands = base.get_commands() commands.add_commands(bot, plugin_commands,", "plugin commands for plugin in plugins_list: if (plugin[0] in ('.', '_') or plugin", "module) except Exception as e: traceback.print_exc() raise BotException( EXCEPTION, \"Failed to import external", "call that function with args. \"\"\" for plugin in bot.plugins.values(): function = getattr(plugin[0],", "'_') or plugin == 'base' or not plugin.endswith('.py')): continue try: spec = importlib.util.spec_from_file_location(" ]
[ "about the wildcard expansion is to use the phrase \"any item A must", "turkey 5. almond_pie, pecan_pie 6. coffee Warning: Rice does not have any ordering.", "appetizers, but before desserts. Other foods are more simple, like a pecan pie,", "character), while food-relationships are two food items that are space delimited. All food-items", "that do not have a relationship must be printed with a warning or", "combination of strings that fit the wildcard. For example, using the items from", "food-relationship is where the first item must be served before the second item.", "and M. N is the number of food items, while M is the", "expansion is to use the phrase \"any item A must be before any", "turkey* crab_cakes salad ## Sample Output 2 1. crab_cakes 2. salad 3. turkey_stuffing", "can be read as \"any pie must be before coffee\". Some orderings may", "of food items, while M is the number of food-relationships. Food-items are unique", "## Sample Output 2 1. crab_cakes 2. salad 3. turkey_stuffing 4. turkey 5.", "first. Work through sample data 2 by hand to better understand the ordering", "ambiguous: you might have two desserts before coffee, but the ordering of desserts", "that might be hard to understand at first. Work through sample data 2", "the items together. ## Output Description Print the correct order of food-items with", "in this schedule, write an error message for it. # Formal Inputs &", "with optional underscores (the '_' character), while food-relationships are two food items that", "have, print the banquet schedule. If a given food item cannot be placed", "'\\*'. You must support this by expanding the rule to fulfill any combination", "before the second item. Note that in the food-relationships list, some food-item names", "on their own lines afterwards. A food-relationship is where the first item must", "## Sample Output 1 1. salad 2. turkey 3. dessert ## Sample Input", "a warning or error message. 
# Sample Inputs & Outputs ## Sample Input", "& Outputs ## Sample Input 1 3 3 salad turkey dessert salad dessert", "main meal. Given a list of foods and the order-relationships they have, print", "they have, print the banquet schedule. If a given food item cannot be", "\"any pie must be before coffee\". Some orderings may be ambiguous: you might", "be served before the second item. Note that in the food-relationships list, some", "Work through sample data 2 by hand to better understand the ordering rules", "food will be served. Some food, like a turkey, have to be served", "\"\\*pie coffee\", which can be read as \"any pie must be before coffee\".", "the first item must be served before the second item. Note that in", "Output 1 1. salad 2. turkey 3. dessert ## Sample Input 2 8", "Sample Output 1 1. salad 2. turkey 3. dessert ## Sample Input 2", "any ordering. # Author's Note: This challenge has some subtle ordering logic that", "will be listed on their own lines afterwards. A food-relationship is where the", "may not be explicit. In such a case, group the items together. ##", "crab_cakes 2. salad 3. turkey_stuffing 4. turkey 5. almond_pie, pecan_pie 6. coffee Warning:", "banquet, but need to figure out the order in which food will be", "[wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support this by expanding the rule to fulfill any", "the main meal. Given a list of foods and the order-relationships they have,", "almond_pie rice coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie coffee salad turkey* crab_cakes", "desserts. Other foods are more simple, like a pecan pie, which can be", "of food-items with a preceding index, starting from 1. If there are ambiguous", "the same line as a comma-delimited array of food-items. Any items that do", "with a warning or error message. # Sample Inputs & Outputs ## Sample", "some subtle ordering logic that might be hard to understand at first. Work", "all widecard rules as well. 
\"\"\" def main(): pass if __name__ == \"__main__\":", "a case, group the items together. ## Output Description Print the correct order", "item. Note that in the food-relationships list, some food-item names can use the", "example, using the items from Sample Input 2, the rule \"turkey\\* \\*_pie\" expands", "Output Description Print the correct order of food-items with a preceding index, starting", "a turkey, have to be served after appetizers, but before desserts. Other foods", "Input 1 3 3 salad turkey dessert salad dessert turkey dessert salad turkey", "Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and your friends are", "turkey turkey* *_pie *pie coffee salad turkey* crab_cakes salad ## Sample Output 2", "If there are ambiguous ordering for items, list them together on the same", "food-relationships list, some food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support", "might be hard to understand at first. Work through sample data 2 by", "such a case, group the items together. ## Output Description Print the correct", "unique single-word lower-case names with optional underscores (the '_' character), while food-relationships are", "rule \"turkey\\* \\*_pie\" expands to the following four rules: turkey almond_pie turkey_stuffing almond_pie", "to think about the wildcard expansion is to use the phrase \"any item", "Rice does not have any ordering. # Author's Note: This challenge has some", "have two desserts before coffee, but the ordering of desserts may not be", "food, like a turkey, have to be served after appetizers, but before desserts.", "fit the wildcard. For example, using the items from Sample Input 2, the", "a list of foods and the order-relationships they have, print the banquet schedule.", "but before desserts. 
Other foods are more simple, like a pecan pie, which", "names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support this by expanding the", "printed with a warning or error message. # Sample Inputs & Outputs ##", "salad turkey dessert salad dessert turkey dessert salad turkey ## Sample Output 1", "turkey ## Sample Output 1 1. salad 2. turkey 3. dessert ## Sample", "are two food items that are space delimited. All food-items will be listed", "ordering of desserts may not be explicit. In such a case, group the", "Warning: Rice does not have any ordering. # Author's Note: This challenge has", "food items, while M is the number of food-relationships. Food-items are unique single-word", "four rules: turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie A helpful way", "Output 2 1. crab_cakes 2. salad 3. turkey_stuffing 4. turkey 5. almond_pie, pecan_pie", "coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie coffee salad turkey* crab_cakes salad ##", "of foods and the order-relationships they have, print the banquet schedule. If a", "and the order-relationships they have, print the banquet schedule. If a given food", "number of food items, while M is the number of food-relationships. Food-items are", "turkey* *_pie *pie coffee salad turkey* crab_cakes salad ## Sample Output 2 1.", "code. Make sure to expand all widecard rules as well. \"\"\" def main():", "the number of food-relationships. Food-items are unique single-word lower-case names with optional underscores", "two food items that are space delimited. All food-items will be listed first", "Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and your friends are planning", "Make sure to expand all widecard rules as well. 
\"\"\" def main(): pass", "you will be given two space-delimited integers, N and M. N is the", "A food-relationship is where the first item must be served before the second", "orderings may be ambiguous: you might have two desserts before coffee, but the", "must be before coffee\". Some orderings may be ambiguous: you might have two", "and your friends are planning a big banquet, but need to figure out", "use the phrase \"any item A must be before any item B\". An", "list, some food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support this", "Challenge #137 [Intermediate / Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning", "the order in which food will be served. Some food, like a turkey,", "hand to better understand the ordering rules before writing code. Make sure to", "given food item cannot be placed in this schedule, write an error message", "served before the second item. Note that in the food-relationships list, some food-item", "write an error message for it. # Formal Inputs & Outputs ## Input", "use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support this by expanding the rule to", "first item must be served before the second item. Note that in the", "that are space delimited. All food-items will be listed first on their own", "[11/28/13] Challenge #137 [Intermediate / Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet", "console input, you will be given two space-delimited integers, N and M. N", "2, the rule \"turkey\\* \\*_pie\" expands to the following four rules: turkey almond_pie", "Sample Inputs & Outputs ## Sample Input 1 3 3 salad turkey dessert", "salad 3. turkey_stuffing 4. 
turkey 5. almond_pie, pecan_pie 6. coffee Warning: Rice does", "space delimited. All food-items will be listed first on their own lines, then", "not have a relationship must be printed with a warning or error message.", "the rule to fulfill any combination of strings that fit the wildcard. For", "the second item. Note that in the food-relationships list, some food-item names can", "items, list them together on the same line as a comma-delimited array of", "names with optional underscores (the '_' character), while food-relationships are two food items", "with a preceding index, starting from 1. If there are ambiguous ordering for", "food-relationship \"\\*pie coffee\", which can be read as \"any pie must be before", "expands to the following four rules: turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing", "space-delimited integers, N and M. N is the number of food items, while", "For example, using the items from Sample Input 2, the rule \"turkey\\* \\*_pie\"", "data 2 by hand to better understand the ordering rules before writing code.", "a comma-delimited array of food-items. Any items that do not have a relationship", "If a given food item cannot be placed in this schedule, write an", "two desserts before coffee, but the ordering of desserts may not be explicit.", "the food-relationship \"\\*pie coffee\", which can be read as \"any pie must be", "be listed on their own lines afterwards. A food-relationship is where the first", "can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support this by expanding the rule", "the ordering of desserts may not be explicit. In such a case, group", "are space delimited. All food-items will be listed first on their own lines,", "some food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support this by", "turkey 3. 
dessert ## Sample Input 2 8 5 turkey pecan_pie salad crab_cakes", "Sample Input 2 8 5 turkey pecan_pie salad crab_cakes almond_pie rice coffee turkey_stuffing", "2. salad 3. turkey_stuffing 4. turkey 5. almond_pie, pecan_pie 6. coffee Warning: Rice", "schedule, write an error message for it. # Formal Inputs & Outputs ##", "3 salad turkey dessert salad dessert turkey dessert salad turkey ## Sample Output", "food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support this by expanding", "your friends are planning a big banquet, but need to figure out the", "be the food-relationship \"\\*pie coffee\", which can be read as \"any pie must", "the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must support this by expanding the rule to fulfill", "In such a case, group the items together. ## Output Description Print the", "wildcard. For example, using the items from Sample Input 2, the rule \"turkey\\*", "Sample Input 1 3 3 salad turkey dessert salad dessert turkey dessert salad", "of desserts may not be explicit. In such a case, group the items", "expand all widecard rules as well. \"\"\" def main(): pass if __name__ ==", "is where the first item must be served before the second item. Note", "are unique single-word lower-case names with optional underscores (the '_' character), while food-relationships", "food-relationships are two food items that are space delimited. All food-items will be", "error message. # Sample Inputs & Outputs ## Sample Input 1 3 3", "\\*_pie\" expands to the following four rules: turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie", "the items from Sample Input 2, the rule \"turkey\\* \\*_pie\" expands to the", "coffee Warning: Rice does not have any ordering. # Author's Note: This challenge", "# [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and your friends are planning a big", "together. 
## Output Description Print the correct order of food-items with a preceding", "salad 2. turkey 3. dessert ## Sample Input 2 8 5 turkey pecan_pie", "phrase \"any item A must be before any item B\". An example would", "then all food-relationships will be listed on their own lines afterwards. A food-relationship", "items that are space delimited. All food-items will be listed first on their", "be printed with a warning or error message. # Sample Inputs & Outputs", "turkey dessert salad dessert turkey dessert salad turkey ## Sample Output 1 1.", "starting from 1. If there are ambiguous ordering for items, list them together", "has some subtle ordering logic that might be hard to understand at first.", "Given a list of foods and the order-relationships they have, print the banquet", "before desserts. Other foods are more simple, like a pecan pie, which can", "\"turkey\\* \\*_pie\" expands to the following four rules: turkey almond_pie turkey_stuffing almond_pie turkey", "subtle ordering logic that might be hard to understand at first. Work through", "salad turkey* crab_cakes salad ## Sample Output 2 1. crab_cakes 2. salad 3.", "first on their own lines, then all food-relationships will be listed on their", "message for it. # Formal Inputs & Outputs ## Input Description On standard", "placed in this schedule, write an error message for it. # Formal Inputs", "8 5 turkey pecan_pie salad crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing turkey turkey*", "the rule \"turkey\\* \\*_pie\" expands to the following four rules: turkey almond_pie turkey_stuffing", "where the first item must be served before the second item. Note that", "On standard console input, you will be given two space-delimited integers, N and", "[](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and your friends are planning a big banquet,", "print the banquet schedule. 
If a given food item cannot be placed in", "ordering for items, list them together on the same line as a comma-delimited", "input, you will be given two space-delimited integers, N and M. N is", "the number of food items, while M is the number of food-relationships. Food-items", "after appetizers, but before desserts. Other foods are more simple, like a pecan", "Other foods are more simple, like a pecan pie, which can be eaten", "group the items together. ## Output Description Print the correct order of food-items", "salad dessert turkey dessert salad turkey ## Sample Output 1 1. salad 2.", "2 by hand to better understand the ordering rules before writing code. Make", "dessert ## Sample Input 2 8 5 turkey pecan_pie salad crab_cakes almond_pie rice", "almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie A helpful way to think about", "4. turkey 5. almond_pie, pecan_pie 6. coffee Warning: Rice does not have any", "not be explicit. In such a case, group the items together. ## Output", "after the main meal. Given a list of foods and the order-relationships they", "\"\"\" [11/28/13] Challenge #137 [Intermediate / Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*:", "be placed in this schedule, write an error message for it. # Formal", "coffee salad turkey* crab_cakes salad ## Sample Output 2 1. crab_cakes 2. salad", "read as \"any pie must be before coffee\". Some orderings may be ambiguous:", "hard to understand at first. Work through sample data 2 by hand to", "turkey pecan_pie turkey_stuffing pecan_pie A helpful way to think about the wildcard expansion", "a big banquet, but need to figure out the order in which food", "it. 
# Formal Inputs & Outputs ## Input Description On standard console input,", "turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie coffee salad turkey* crab_cakes salad ## Sample", "big banquet, but need to figure out the order in which food will", "friends are planning a big banquet, but need to figure out the order", "but need to figure out the order in which food will be served.", "Description On standard console input, you will be given two space-delimited integers, N", "ambiguous ordering for items, list them together on the same line as a", "be before coffee\". Some orderings may be ambiguous: you might have two desserts", "this schedule, write an error message for it. # Formal Inputs & Outputs", "Note that in the food-relationships list, some food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character)", "Input 2, the rule \"turkey\\* \\*_pie\" expands to the following four rules: turkey", "https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and your friends are planning a", "salad turkey ## Sample Output 1 1. salad 2. turkey 3. dessert ##", "Sample Input 2, the rule \"turkey\\* \\*_pie\" expands to the following four rules:", "turkey_stuffing turkey turkey* *_pie *pie coffee salad turkey* crab_cakes salad ## Sample Output", "that fit the wildcard. For example, using the items from Sample Input 2,", "schedule. If a given food item cannot be placed in this schedule, write", "## Output Description Print the correct order of food-items with a preceding index,", "served after appetizers, but before desserts. Other foods are more simple, like a", "must be printed with a warning or error message. # Sample Inputs &", "before coffee\". Some orderings may be ambiguous: you might have two desserts before", "index, starting from 1. 
If there are ambiguous ordering for items, list them", "# Formal Inputs & Outputs ## Input Description On standard console input, you", "& Outputs ## Input Description On standard console input, you will be given", "example would be the food-relationship \"\\*pie coffee\", which can be read as \"any", "writing code. Make sure to expand all widecard rules as well. \"\"\" def", "*_pie *pie coffee salad turkey* crab_cakes salad ## Sample Output 2 1. crab_cakes", "listed first on their own lines, then all food-relationships will be listed on", "same line as a comma-delimited array of food-items. Any items that do not", "from 1. If there are ambiguous ordering for items, list them together on", "foods and the order-relationships they have, print the banquet schedule. If a given", "the correct order of food-items with a preceding index, starting from 1. If", "Formal Inputs & Outputs ## Input Description On standard console input, you will", "salad ## Sample Output 2 1. crab_cakes 2. salad 3. turkey_stuffing 4. turkey", "integers, N and M. N is the number of food items, while M", "correct order of food-items with a preceding index, starting from 1. If there", "a given food item cannot be placed in this schedule, write an error", "pecan_pie turkey_stuffing pecan_pie A helpful way to think about the wildcard expansion is", "1. If there are ambiguous ordering for items, list them together on the", "any item B\". An example would be the food-relationship \"\\*pie coffee\", which can", "M. N is the number of food items, while M is the number", "/ Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and your", "support this by expanding the rule to fulfill any combination of strings that", "pie, which can be eaten any time after the main meal. 
Given a", "items that do not have a relationship must be printed with a warning", "Outputs ## Sample Input 1 3 3 salad turkey dessert salad dessert turkey", "coffee\", which can be read as \"any pie must be before coffee\". Some", "delimited. All food-items will be listed first on their own lines, then all", "figure out the order in which food will be served. Some food, like", "must be served before the second item. Note that in the food-relationships list,", "a pecan pie, which can be eaten any time after the main meal.", "ordering logic that might be hard to understand at first. Work through sample", "to fulfill any combination of strings that fit the wildcard. For example, using", "## Sample Input 1 3 3 salad turkey dessert salad dessert turkey dessert", "need to figure out the order in which food will be served. Some", "list of foods and the order-relationships they have, print the banquet schedule. If", "3. dessert ## Sample Input 2 8 5 turkey pecan_pie salad crab_cakes almond_pie", "be ambiguous: you might have two desserts before coffee, but the ordering of", "rule to fulfill any combination of strings that fit the wildcard. For example,", "any time after the main meal. Given a list of foods and the", "number of food-relationships. Food-items are unique single-word lower-case names with optional underscores (the", "on their own lines, then all food-relationships will be listed on their own", "items together. ## Output Description Print the correct order of food-items with a", "more simple, like a pecan pie, which can be eaten any time after", "must be before any item B\". An example would be the food-relationship \"\\*pie", "relationship must be printed with a warning or error message. # Sample Inputs", "of food-relationships. Food-items are unique single-word lower-case names with optional underscores (the '_'", "1 1. salad 2. turkey 3. 
dessert ## Sample Input 2 8 5", "Some orderings may be ambiguous: you might have two desserts before coffee, but", "be given two space-delimited integers, N and M. N is the number of", "dessert salad dessert turkey dessert salad turkey ## Sample Output 1 1. salad", "pecan_pie 6. coffee Warning: Rice does not have any ordering. # Author's Note:", "banquet schedule. If a given food item cannot be placed in this schedule,", "desserts before coffee, but the ordering of desserts may not be explicit. In", "<gh_stars>1-10 \"\"\" [11/28/13] Challenge #137 [Intermediate / Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon)", "will be served. Some food, like a turkey, have to be served after", "may be ambiguous: you might have two desserts before coffee, but the ordering", "preceding index, starting from 1. If there are ambiguous ordering for items, list", "lines, then all food-relationships will be listed on their own lines afterwards. A", "# Sample Inputs & Outputs ## Sample Input 1 3 3 salad turkey", "lower-case names with optional underscores (the '_' character), while food-relationships are two food", "turkey_stuffing pecan_pie A helpful way to think about the wildcard expansion is to", "comma-delimited array of food-items. Any items that do not have a relationship must", "eaten any time after the main meal. Given a list of foods and", "N and M. N is the number of food items, while M is", "have to be served after appetizers, but before desserts. Other foods are more", "own lines afterwards. A food-relationship is where the first item must be served", "do not have a relationship must be printed with a warning or error", "as a comma-delimited array of food-items. 
Any items that do not have a", "are planning a big banquet, but need to figure out the order in", "Print the correct order of food-items with a preceding index, starting from 1.", "by expanding the rule to fulfill any combination of strings that fit the", "the wildcard. For example, using the items from Sample Input 2, the rule", "Planning You and your friends are planning a big banquet, but need to", "underscores (the '_' character), while food-relationships are two food items that are space", "order of food-items with a preceding index, starting from 1. If there are", "5. almond_pie, pecan_pie 6. coffee Warning: Rice does not have any ordering. #", "turkey_stuffing 4. turkey 5. almond_pie, pecan_pie 6. coffee Warning: Rice does not have", "widecard rules as well. \"\"\" def main(): pass if __name__ == \"__main__\": main()", "case, group the items together. ## Output Description Print the correct order of", "pecan_pie A helpful way to think about the wildcard expansion is to use", "lines afterwards. A food-relationship is where the first item must be served before", "5 turkey pecan_pie salad crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie", "be hard to understand at first. Work through sample data 2 by hand", "the ordering rules before writing code. Make sure to expand all widecard rules", "to expand all widecard rules as well. \"\"\" def main(): pass if __name__", "or error message. # Sample Inputs & Outputs ## Sample Input 1 3", "will be given two space-delimited integers, N and M. N is the number", "turkey dessert salad turkey ## Sample Output 1 1. salad 2. turkey 3.", "crab_cakes salad ## Sample Output 2 1. crab_cakes 2. salad 3. turkey_stuffing 4.", "order in which food will be served. Some food, like a turkey, have", "understand at first. Work through sample data 2 by hand to better understand", "pie must be before coffee\". Some orderings may be ambiguous: you might have", "6. 
coffee Warning: Rice does not have any ordering. # Author's Note: This", "logic that might be hard to understand at first. Work through sample data", "Food-items are unique single-word lower-case names with optional underscores (the '_' character), while", "optional underscores (the '_' character), while food-relationships are two food items that are", "to the following four rules: turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie", "together on the same line as a comma-delimited array of food-items. Any items", "1 3 3 salad turkey dessert salad dessert turkey dessert salad turkey ##", "before any item B\". An example would be the food-relationship \"\\*pie coffee\", which", "the wildcard expansion is to use the phrase \"any item A must be", "array of food-items. Any items that do not have a relationship must be", "food items that are space delimited. All food-items will be listed first on", "This challenge has some subtle ordering logic that might be hard to understand", "Inputs & Outputs ## Input Description On standard console input, you will be", "to use the phrase \"any item A must be before any item B\".", "understand the ordering rules before writing code. Make sure to expand all widecard", "through sample data 2 by hand to better understand the ordering rules before", "item A must be before any item B\". An example would be the", "which can be read as \"any pie must be before coffee\". Some orderings", "a relationship must be printed with a warning or error message. # Sample", "pecan_pie salad crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie coffee", "desserts may not be explicit. In such a case, group the items together.", "does not have any ordering. # Author's Note: This challenge has some subtle", "while food-relationships are two food items that are space delimited. 
All food-items will", "Note: This challenge has some subtle ordering logic that might be hard to", "error message for it. # Formal Inputs & Outputs ## Input Description On", "*pie coffee salad turkey* crab_cakes salad ## Sample Output 2 1. crab_cakes 2.", "at first. Work through sample data 2 by hand to better understand the", "their own lines, then all food-relationships will be listed on their own lines", "not have any ordering. # Author's Note: This challenge has some subtle ordering", "before coffee, but the ordering of desserts may not be explicit. In such", "N is the number of food items, while M is the number of", "way to think about the wildcard expansion is to use the phrase \"any", "order-relationships they have, print the banquet schedule. If a given food item cannot", "A must be before any item B\". An example would be the food-relationship", "an error message for it. # Formal Inputs & Outputs ## Input Description", "meal. Given a list of foods and the order-relationships they have, print the", "out the order in which food will be served. Some food, like a", "turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie A helpful way to think", "the phrase \"any item A must be before any item B\". An example", "better understand the ordering rules before writing code. Make sure to expand all", "item cannot be placed in this schedule, write an error message for it.", "cannot be placed in this schedule, write an error message for it. #", "in the food-relationships list, some food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You", "you might have two desserts before coffee, but the ordering of desserts may", "will be listed first on their own lines, then all food-relationships will be", "You and your friends are planning a big banquet, but need to figure", "warning or error message. # Sample Inputs & Outputs ## Sample Input 1", "sure to expand all widecard rules as well. 
\"\"\" def main(): pass if", "from Sample Input 2, the rule \"turkey\\* \\*_pie\" expands to the following four", "time after the main meal. Given a list of foods and the order-relationships", "all food-relationships will be listed on their own lines afterwards. A food-relationship is", "be read as \"any pie must be before coffee\". Some orderings may be", "coffee\". Some orderings may be ambiguous: you might have two desserts before coffee,", "must support this by expanding the rule to fulfill any combination of strings", "food-relationships. Food-items are unique single-word lower-case names with optional underscores (the '_' character),", "the food-relationships list, some food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'. You must", "challenge has some subtle ordering logic that might be hard to understand at", "them together on the same line as a comma-delimited array of food-items. Any", "while M is the number of food-relationships. Food-items are unique single-word lower-case names", "can be eaten any time after the main meal. Given a list of", "B\". An example would be the food-relationship \"\\*pie coffee\", which can be read", "in which food will be served. Some food, like a turkey, have to", "expanding the rule to fulfill any combination of strings that fit the wildcard.", "turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie A helpful way to think about the", "food-items with a preceding index, starting from 1. If there are ambiguous ordering", "crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie coffee salad turkey*", "Sample Output 2 1. crab_cakes 2. salad 3. turkey_stuffing 4. turkey 5. almond_pie,", "is the number of food items, while M is the number of food-relationships.", "food-items. 
Any items that do not have a relationship must be printed with", "is to use the phrase \"any item A must be before any item", "A helpful way to think about the wildcard expansion is to use the", "of food-items. Any items that do not have a relationship must be printed", "the order-relationships they have, print the banquet schedule. If a given food item", "Description Print the correct order of food-items with a preceding index, starting from", "afterwards. A food-relationship is where the first item must be served before the", "second item. Note that in the food-relationships list, some food-item names can use", "planning a big banquet, but need to figure out the order in which", "message. # Sample Inputs & Outputs ## Sample Input 1 3 3 salad", "Author's Note: This challenge has some subtle ordering logic that might be hard", "before writing code. Make sure to expand all widecard rules as well. \"\"\"", "All food-items will be listed first on their own lines, then all food-relationships", "explicit. In such a case, group the items together. ## Output Description Print", "for items, list them together on the same line as a comma-delimited array", "is the number of food-relationships. Food-items are unique single-word lower-case names with optional", "rules: turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie A helpful way to", "pecan pie, which can be eaten any time after the main meal. Given", "food-relationships will be listed on their own lines afterwards. A food-relationship is where", "of strings that fit the wildcard. For example, using the items from Sample", "this by expanding the rule to fulfill any combination of strings that fit", "3 3 salad turkey dessert salad dessert turkey dessert salad turkey ## Sample", "be before any item B\". 
An example would be the food-relationship \"\\*pie coffee\",", "sample data 2 by hand to better understand the ordering rules before writing", "Outputs ## Input Description On standard console input, you will be given two", "foods are more simple, like a pecan pie, which can be eaten any", "but the ordering of desserts may not be explicit. In such a case,", "any combination of strings that fit the wildcard. For example, using the items", "items, while M is the number of food-relationships. Food-items are unique single-word lower-case", "which can be eaten any time after the main meal. Given a list", "food-items will be listed first on their own lines, then all food-relationships will", "'_' character), while food-relationships are two food items that are space delimited. All", "standard console input, you will be given two space-delimited integers, N and M.", "An example would be the food-relationship \"\\*pie coffee\", which can be read as", "strings that fit the wildcard. For example, using the items from Sample Input", "## Sample Input 2 8 5 turkey pecan_pie salad crab_cakes almond_pie rice coffee", "item B\". An example would be the food-relationship \"\\*pie coffee\", which can be", "listed on their own lines afterwards. A food-relationship is where the first item", "ordering. # Author's Note: This challenge has some subtle ordering logic that might", "are ambiguous ordering for items, list them together on the same line as", "Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and your friends", "on the same line as a comma-delimited array of food-items. Any items that", "M is the number of food-relationships. Food-items are unique single-word lower-case names with", "3. turkey_stuffing 4. turkey 5. almond_pie, pecan_pie 6. coffee Warning: Rice does not", "almond_pie, pecan_pie 6. 
coffee Warning: Rice does not have any ordering. # Author's", "two space-delimited integers, N and M. N is the number of food items,", "#137 [Intermediate / Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You", "\"any item A must be before any item B\". An example would be", "helpful way to think about the wildcard expansion is to use the phrase", "dessert turkey dessert salad turkey ## Sample Output 1 1. salad 2. turkey", "almond_pie turkey pecan_pie turkey_stuffing pecan_pie A helpful way to think about the wildcard", "Some food, like a turkey, have to be served after appetizers, but before", "are more simple, like a pecan pie, which can be eaten any time", "coffee, but the ordering of desserts may not be explicit. In such a", "*(Intermediate)*: Banquet Planning You and your friends are planning a big banquet, but", "# Author's Note: This challenge has some subtle ordering logic that might be", "items from Sample Input 2, the rule \"turkey\\* \\*_pie\" expands to the following", "be eaten any time after the main meal. Given a list of foods", "might have two desserts before coffee, but the ordering of desserts may not", "own lines, then all food-relationships will be listed on their own lines afterwards.", "1. salad 2. turkey 3. dessert ## Sample Input 2 8 5 turkey", "item must be served before the second item. Note that in the food-relationships", "You must support this by expanding the rule to fulfill any combination of", "to better understand the ordering rules before writing code. Make sure to expand", "a preceding index, starting from 1. If there are ambiguous ordering for items,", "to understand at first. Work through sample data 2 by hand to better", "to be served after appetizers, but before desserts. Other foods are more simple,", "be served after appetizers, but before desserts. 
Other foods are more simple, like", "Inputs & Outputs ## Sample Input 1 3 3 salad turkey dessert salad", "2 8 5 turkey pecan_pie salad crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing turkey", "rules before writing code. Make sure to expand all widecard rules as well.", "which food will be served. Some food, like a turkey, have to be", "think about the wildcard expansion is to use the phrase \"any item A", "salad crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie coffee salad", "using the items from Sample Input 2, the rule \"turkey\\* \\*_pie\" expands to", "list them together on the same line as a comma-delimited array of food-items.", "Input Description On standard console input, you will be given two space-delimited integers,", "1. crab_cakes 2. salad 3. turkey_stuffing 4. turkey 5. almond_pie, pecan_pie 6. coffee", "(the '_' character), while food-relationships are two food items that are space delimited.", "the following four rules: turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie A", "ordering rules before writing code. Make sure to expand all widecard rules as", "that in the food-relationships list, some food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\\*'.", "to figure out the order in which food will be served. 
Some food,", "wildcard expansion is to use the phrase \"any item A must be before", "single-word lower-case names with optional underscores (the '_' character), while food-relationships are two", "there are ambiguous ordering for items, list them together on the same line", "[Intermediate / Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and", "simple, like a pecan pie, which can be eaten any time after the", "Input 2 8 5 turkey pecan_pie salad crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing", "Banquet Planning You and your friends are planning a big banquet, but need", "food item cannot be placed in this schedule, write an error message for", "be served. Some food, like a turkey, have to be served after appetizers,", "fulfill any combination of strings that fit the wildcard. For example, using the", "have any ordering. # Author's Note: This challenge has some subtle ordering logic", "would be the food-relationship \"\\*pie coffee\", which can be read as \"any pie", "turkey pecan_pie salad crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie", "2 1. crab_cakes 2. salad 3. turkey_stuffing 4. turkey 5. almond_pie, pecan_pie 6.", "Any items that do not have a relationship must be printed with a", "2. turkey 3. dessert ## Sample Input 2 8 5 turkey pecan_pie salad", "for it. # Formal Inputs & Outputs ## Input Description On standard console", "rice coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie coffee salad turkey* crab_cakes salad", "their own lines afterwards. A food-relationship is where the first item must be", "given two space-delimited integers, N and M. N is the number of food", "line as a comma-delimited array of food-items. Any items that do not have", "dessert salad turkey ## Sample Output 1 1. salad 2. turkey 3. 
dessert", "like a pecan pie, which can be eaten any time after the main", "have a relationship must be printed with a warning or error message. #", "be listed first on their own lines, then all food-relationships will be listed", "the banquet schedule. If a given food item cannot be placed in this", "following four rules: turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie A helpful", "served. Some food, like a turkey, have to be served after appetizers, but", "## Input Description On standard console input, you will be given two space-delimited", "be explicit. In such a case, group the items together. ## Output Description", "like a turkey, have to be served after appetizers, but before desserts. Other", "by hand to better understand the ordering rules before writing code. Make sure", "turkey, have to be served after appetizers, but before desserts. Other foods are", "as \"any pie must be before coffee\". Some orderings may be ambiguous: you" ]
[ "edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2 = BitVecVal(2,s) v3 = BitVecVal(3,s) v4", "= BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set of rules\", fp print fp.query(path(v1,v4)),", "of rules\", fp print fp.query(path(v1,v4)), \"yes we can reach v4 from v1\" print", "= BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set of rules\",", "print \"current set of rules\", fp print fp.query(path(v1,v4)), \"yes we can reach v4", "Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2 = BitVecVal(2,s)", "rules\", fp print fp.query(path(v1,v4)), \"yes we can reach v4 from v1\" print fp.query(path(v3,v4)),", "we can reach v4 from v1\" print fp.query(path(v3,v4)), \"no we cannot reach v4", "= BitVecVal(1,s) v2 = BitVecVal(2,s) v3 = BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3))", "= Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2 =", "a = Const('a',s) b = Const('b',s) c = Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b))", "set of rules\", fp print fp.query(path(v1,v4)), \"yes we can reach v4 from v1\"", "fp.query(path(v1,v4)), \"yes we can reach v4 from v1\" print fp.query(path(v3,v4)), \"no we cannot", "\"yes we can reach v4 from v1\" print fp.query(path(v3,v4)), \"no we cannot reach", "BoolSort()) a = Const('a',s) b = Const('b',s) c = Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b),", "fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set of rules\", fp print fp.query(path(v1,v4)), \"yes we", "path = Function('path', s, s, BoolSort()) a = Const('a',s) b = Const('b',s) 
c", "s = BitVecSort(3) edge = Function('edge', s, s, BoolSort()) path = Function('path', s,", "edge = Function('edge', s, s, BoolSort()) path = Function('path', s, s, BoolSort()) a", "b = Const('b',s) c = Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1", "\"current set of rules\", fp print fp.query(path(v1,v4)), \"yes we can reach v4 from", "s, BoolSort()) a = Const('a',s) b = Const('b',s) c = Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c)", "BitVecVal(2,s) v3 = BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set", "[edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2 = BitVecVal(2,s) v3 = BitVecVal(3,s) v4 = BitVecVal(4,s)", "reach v4 from v1\" print fp.query(path(v3,v4)), \"no we cannot reach v4 from v3\"", "Const('b',s) c = Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s)", "v1 = BitVecVal(1,s) v2 = BitVecVal(2,s) v3 = BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2))", "v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set of rules\", fp print", "fp.set(engine='datalog') s = BitVecSort(3) edge = Function('edge', s, s, BoolSort()) path = Function('path',", "= BitVecVal(2,s) v3 = BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current", "c = Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2", "= Const('a',s) b = Const('b',s) c = Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c),", "print fp.query(path(v1,v4)), \"yes we can reach v4 from v1\" print fp.query(path(v3,v4)), \"no we", "= BitVecSort(3) edge 
= Function('edge', s, s, BoolSort()) path = Function('path', s, s,", "s, s, BoolSort()) path = Function('path', s, s, BoolSort()) a = Const('a',s) b", "BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set of rules\", fp print fp.query(path(v1,v4)), \"yes", "= Function('path', s, s, BoolSort()) a = Const('a',s) b = Const('b',s) c =", "BitVecVal(1,s) v2 = BitVecVal(2,s) v3 = BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4))", "BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set of rules\", fp", "fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set of rules\", fp print fp.query(path(v1,v4)), \"yes we can", "Function('edge', s, s, BoolSort()) path = Function('path', s, s, BoolSort()) a = Const('a',s)", "s, BoolSort()) path = Function('path', s, s, BoolSort()) a = Const('a',s) b =", "= Function('edge', s, s, BoolSort()) path = Function('path', s, s, BoolSort()) a =", "fp print fp.query(path(v1,v4)), \"yes we can reach v4 from v1\" print fp.query(path(v3,v4)), \"no", "= Const('b',s) c = Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 =", "fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2 = BitVecVal(2,s) v3 = BitVecVal(3,s) v4 =", "v3 = BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print \"current set of", "= Fixedpoint() fp.set(engine='datalog') s = BitVecSort(3) edge = Function('edge', s, s, BoolSort()) path", "fp = Fixedpoint() fp.set(engine='datalog') s = BitVecSort(3) edge = Function('edge', s, s, BoolSort())", "Fixedpoint() fp.set(engine='datalog') s = BitVecSort(3) edge = Function('edge', s, s, BoolSort()) path =", "BoolSort()) path = Function('path', s, s, BoolSort()) a = Const('a',s) b = Const('b',s)", "Const('a',s) b = Const('b',s) c = 
Const('c',s) fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)])", "fp.register_relation(path,edge) fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2 = BitVecVal(2,s) v3", "s, s, BoolSort()) a = Const('a',s) b = Const('b',s) c = Const('c',s) fp.register_relation(path,edge)", "can reach v4 from v1\" print fp.query(path(v3,v4)), \"no we cannot reach v4 from", "Function('path', s, s, BoolSort()) a = Const('a',s) b = Const('b',s) c = Const('c',s)", "v2 = BitVecVal(2,s) v3 = BitVecVal(3,s) v4 = BitVecVal(4,s) fp.fact(edge(v1,v2)) fp.fact(edge(v1,v3)) fp.fact(edge(v2,v4)) print", "fp.fact(edge(v2,v4)) print \"current set of rules\", fp print fp.query(path(v1,v4)), \"yes we can reach", "fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2 = BitVecVal(2,s) v3 = BitVecVal(3,s)", "fp.declare_var(a,b,c) fp.rule(path(a,b), edge(a,b)) fp.rule(path(a,c), [edge(a,b),path(b,c)]) v1 = BitVecVal(1,s) v2 = BitVecVal(2,s) v3 =", "BitVecSort(3) edge = Function('edge', s, s, BoolSort()) path = Function('path', s, s, BoolSort())" ]
[ "to a convolution layer Arguments: shape (tuple): The desired shape of the placeholder,", "\"\"\" H = ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\")", "C, D, H, W, N Returns: 5-D placeholder op \"\"\" H = ng.make_axis(name=\"H\",", "None: x.axes.set_shape(shape) return x def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name = obj.__name__ elif", "ng from .axis import ax def make_convolution_placeholder(shape=None): \"\"\" Create a placeholder op for", "The desired shape of the placeholder, with axes in the order of C,", "x def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name = obj.__name__ elif callable(obj): name =", "Arguments: shape (tuple): The desired shape of the placeholder, with axes in the", "ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N])) if shape is not", "D = ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D, H,", "(tuple): The desired shape of the placeholder, with axes in the order of", "N Returns: 5-D placeholder op \"\"\" H = ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\",", "__future__ import absolute_import import neon as ng from .axis import ax def make_convolution_placeholder(shape=None):", "docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\")", "= ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N])) if shape is not None: x.axes.set_shape(shape) return", "ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\",", "H, W, N Returns: 5-D placeholder op \"\"\" H = ng.make_axis(name=\"H\", 
docstring=\"Height\") W", "the placeholder, with axes in the order of C, D, H, W, N", "= ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\") C =", "H, W, ax.N])) if shape is not None: x.axes.set_shape(shape) return x def get_function_or_class_name(obj):", "5-D placeholder op \"\"\" H = ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\") D", "docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N])) if", "order of C, D, H, W, N Returns: 5-D placeholder op \"\"\" H", "axes in the order of C, D, H, W, N Returns: 5-D placeholder", "ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N]))", ".axis import ax def make_convolution_placeholder(shape=None): \"\"\" Create a placeholder op for inputs to", "is not None: x.axes.set_shape(shape) return x def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name =", "for inputs to a convolution layer Arguments: shape (tuple): The desired shape of", "layer Arguments: shape (tuple): The desired shape of the placeholder, with axes in", "ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C,", "H = ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\") C", "from .axis import ax def make_convolution_placeholder(shape=None): \"\"\" Create a placeholder op for inputs", "W, ax.N])) if shape is not None: x.axes.set_shape(shape) return x def get_function_or_class_name(obj): if", "def make_convolution_placeholder(shape=None): \"\"\" Create a placeholder op for inputs to a convolution layer", "shape 
is not None: x.axes.set_shape(shape) return x def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name", "placeholder, with axes in the order of C, D, H, W, N Returns:", "docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D,", "op \"\"\" H = ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\",", "neon as ng from .axis import ax def make_convolution_placeholder(shape=None): \"\"\" Create a placeholder", "import absolute_import import neon as ng from .axis import ax def make_convolution_placeholder(shape=None): \"\"\"", "not None: x.axes.set_shape(shape) return x def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name = obj.__name__", "x.axes.set_shape(shape) return x def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name = obj.__name__ elif callable(obj):", "ax def make_convolution_placeholder(shape=None): \"\"\" Create a placeholder op for inputs to a convolution", "of the placeholder, with axes in the order of C, D, H, W,", "Create a placeholder op for inputs to a convolution layer Arguments: shape (tuple):", "<filename>src/neon/frontend/utils.py from __future__ import absolute_import import neon as ng from .axis import ax", "placeholder op for inputs to a convolution layer Arguments: shape (tuple): The desired", "C = ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N])) if shape", "ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N])) if shape is not None: x.axes.set_shape(shape) return x", "def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name = obj.__name__ elif callable(obj): name = type(obj).__name__", "from __future__ import absolute_import import neon as ng from .axis import ax def", "absolute_import import neon as ng from .axis import ax def 
make_convolution_placeholder(shape=None): \"\"\" Create", "make_convolution_placeholder(shape=None): \"\"\" Create a placeholder op for inputs to a convolution layer Arguments:", "\"__name__\"): name = obj.__name__ elif callable(obj): name = type(obj).__name__ else: name = None", "the order of C, D, H, W, N Returns: 5-D placeholder op \"\"\"", "hasattr(obj, \"__name__\"): name = obj.__name__ elif callable(obj): name = type(obj).__name__ else: name =", "W = ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\") x", "= ng.make_axis(name=\"W\", docstring=\"Width\") D = ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\") x =", "= ng.make_axis(name=\"D\", docstring=\"Depth\") C = ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D, H, W,", "shape of the placeholder, with axes in the order of C, D, H,", "D, H, W, N Returns: 5-D placeholder op \"\"\" H = ng.make_axis(name=\"H\", docstring=\"Height\")", "= obj.__name__ elif callable(obj): name = type(obj).__name__ else: name = None return name", "import neon as ng from .axis import ax def make_convolution_placeholder(shape=None): \"\"\" Create a", "ax.N])) if shape is not None: x.axes.set_shape(shape) return x def get_function_or_class_name(obj): if hasattr(obj,", "in the order of C, D, H, W, N Returns: 5-D placeholder op", "op for inputs to a convolution layer Arguments: shape (tuple): The desired shape", "get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name = obj.__name__ elif callable(obj): name = type(obj).__name__ else:", "\"\"\" Create a placeholder op for inputs to a convolution layer Arguments: shape", "return x def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"): name = obj.__name__ elif callable(obj): name", "shape (tuple): The desired shape of the placeholder, with axes in the order", "if hasattr(obj, 
\"__name__\"): name = obj.__name__ elif callable(obj): name = type(obj).__name__ else: name", "inputs to a convolution layer Arguments: shape (tuple): The desired shape of the", "name = obj.__name__ elif callable(obj): name = type(obj).__name__ else: name = None return", "if shape is not None: x.axes.set_shape(shape) return x def get_function_or_class_name(obj): if hasattr(obj, \"__name__\"):", "with axes in the order of C, D, H, W, N Returns: 5-D", "convolution layer Arguments: shape (tuple): The desired shape of the placeholder, with axes", "docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N])) if shape is not None:", "x = ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N])) if shape is not None: x.axes.set_shape(shape)", "desired shape of the placeholder, with axes in the order of C, D,", "of C, D, H, W, N Returns: 5-D placeholder op \"\"\" H =", "a convolution layer Arguments: shape (tuple): The desired shape of the placeholder, with", "placeholder op \"\"\" H = ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\") D =", "as ng from .axis import ax def make_convolution_placeholder(shape=None): \"\"\" Create a placeholder op", "import ax def make_convolution_placeholder(shape=None): \"\"\" Create a placeholder op for inputs to a", "W, N Returns: 5-D placeholder op \"\"\" H = ng.make_axis(name=\"H\", docstring=\"Height\") W =", "= ng.make_axis(name=\"C\", docstring=\"Channel\") x = ng.placeholder(axes=ng.make_axes([C, D, H, W, ax.N])) if shape is", "D, H, W, ax.N])) if shape is not None: x.axes.set_shape(shape) return x def", "a placeholder op for inputs to a convolution layer Arguments: shape (tuple): The", "Returns: 5-D placeholder op \"\"\" H = ng.make_axis(name=\"H\", docstring=\"Height\") W = ng.make_axis(name=\"W\", docstring=\"Width\")" ]
[ "03:27 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies =", "<gh_stars>1-10 # -*- coding: utf-8 -*- # Generated by Django 1.10.8 on 2019-02-28", "on 2019-02-28 03:27 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration):", "# -*- coding: utf-8 -*- # Generated by Django 1.10.8 on 2019-02-28 03:27", "import migrations class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations = [", "Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations = [ migrations.RemoveField( model_name='application', name='proposed_decline_status',", "from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations", "coding: utf-8 -*- # Generated by Django 1.10.8 on 2019-02-28 03:27 from __future__", "-*- coding: utf-8 -*- # Generated by Django 1.10.8 on 2019-02-28 03:27 from", "('wildlifecompliance', '0124_auto_20190228_1035'), ] operations = [ migrations.RemoveField( model_name='application', name='proposed_decline_status', ), migrations.RemoveField( model_name='application', name='proposed_issuance_licence',", "__future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance',", "Django 1.10.8 on 2019-02-28 03:27 from __future__ import unicode_literals from django.db import migrations", "[ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations = [ migrations.RemoveField( model_name='application', name='proposed_decline_status', ), migrations.RemoveField( model_name='application',", "django.db import migrations class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations =", "utf-8 -*- # Generated by Django 
1.10.8 on 2019-02-28 03:27 from __future__ import", "-*- # Generated by Django 1.10.8 on 2019-02-28 03:27 from __future__ import unicode_literals", "1.10.8 on 2019-02-28 03:27 from __future__ import unicode_literals from django.db import migrations class", "unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'), ]", "# Generated by Django 1.10.8 on 2019-02-28 03:27 from __future__ import unicode_literals from", "= [ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations = [ migrations.RemoveField( model_name='application', name='proposed_decline_status', ), migrations.RemoveField(", "from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [", "import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'),", "migrations class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations = [ migrations.RemoveField(", "'0124_auto_20190228_1035'), ] operations = [ migrations.RemoveField( model_name='application', name='proposed_decline_status', ), migrations.RemoveField( model_name='application', name='proposed_issuance_licence', ),", "dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations = [ migrations.RemoveField( model_name='application', name='proposed_decline_status', ),", "] operations = [ migrations.RemoveField( model_name='application', name='proposed_decline_status', ), migrations.RemoveField( model_name='application', name='proposed_issuance_licence', ), ]", "Generated by Django 1.10.8 on 2019-02-28 03:27 from __future__ import unicode_literals from django.db", "by Django 1.10.8 on 2019-02-28 03:27 from __future__ import unicode_literals from django.db import", "2019-02-28 03:27 from __future__ import 
unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies", "class Migration(migrations.Migration): dependencies = [ ('wildlifecompliance', '0124_auto_20190228_1035'), ] operations = [ migrations.RemoveField( model_name='application'," ]
[ "track=False) send_array(socket, px, flags=0, copy=True, track=False) send_array(socket, end_position, flags=0, copy=True, track=False) print(\"message sent\")", "metadata\"\"\" md = dict( dtype = str(A.dtype), shape = A.shape, ) socket.send_json(md, flags|zmq.SNDMORE)", "track=False) print(\"message sent\") elif command == \"action\": print(1) elif command == \"done\": print(2)", "def send_array(socket, A, flags=0, copy=True, track=False): \"\"\"send a numpy array with metadata\"\"\" md", "done, infos, px, end_position = data send_array(socket, joint_state, flags=0, copy=True, track=False) send_array(socket, np.array(reward),", "A.shape, ) socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy, track=track) def test_inmoov_gym(): while True:", "send_array(socket, np.array(done), flags=0, copy=True, track=False) send_array(socket, px, flags=0, copy=True, track=False) send_array(socket, end_position, flags=0,", "len(init_pose) while True: msg = socket.recv_json() command = msg[\"command\"] if command == \"position\":", "md = dict( dtype = str(A.dtype), shape = A.shape, ) socket.send_json(md, flags|zmq.SNDMORE) return", "InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num = len(init_pose) while True: msg = socket.recv_json()", "data = robot.server_step(msg[command]) joint_state, reward, done, infos, px, end_position = data send_array(socket, joint_state,", "command == \"action\": print(1) elif command == \"done\": print(2) elif command == \"reset\":", "= str(A.dtype), shape = A.shape, ) socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy, track=track)", "flags=0, copy=True, track=False) send_array(socket, end_position, flags=0, copy=True, track=False) print(\"message sent\") elif command ==", "SERVER_PORT = 7777 HOSTNAME = 'localhost' def send_array(socket, A, flags=0, copy=True, track=False): \"\"\"send", "robot.step(action) except: continue # robot.step() 
if __name__ == \"__main__\": socket = server_connection() robot", "== \"position\": data = robot.server_step(msg[command]) joint_state, reward, done, infos, px, end_position = data", "= input() try: # time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal = k.split() joint, move", "reward, done, infos, px, end_position = data send_array(socket, joint_state, flags=0, copy=True, track=False) send_array(socket,", "action[joint] = move robot.step(action) except: continue # robot.step() if __name__ == \"__main__\": socket", "end_position, flags=0, copy=True, track=False) print(\"message sent\") elif command == \"action\": print(1) elif command", "move robot.step(action) except: continue # robot.step() if __name__ == \"__main__\": socket = server_connection()", "robot.step() if __name__ == \"__main__\": socket = server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose", "joint, move = int(signal[0]), float(signal[1]) action[joint] = move robot.step(action) except: continue # robot.step()", "= np.zeros(shape=(joints_num,)) signal = k.split() joint, move = int(signal[0]), float(signal[1]) action[joint] = move", "shape = A.shape, ) socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy, track=track) def test_inmoov_gym():", "copy=True, track=False) send_array(socket, px, flags=0, copy=True, track=False) send_array(socket, end_position, flags=0, copy=True, track=False) print(\"message", "px, end_position = data send_array(socket, joint_state, flags=0, copy=True, track=False) send_array(socket, np.array(reward), flags=0, copy=True,", "px, flags=0, copy=True, track=False) send_array(socket, end_position, flags=0, copy=True, track=False) print(\"message sent\") elif command", "= int(signal[0]), float(signal[1]) action[joint] = move robot.step(action) except: continue # robot.step() if __name__", "copy=True, track=False): \"\"\"send a numpy array with metadata\"\"\" md = dict( dtype =", "copy=copy, track=track) 
def test_inmoov_gym(): while True: k = input() try: # time.sleep(0.5) action", "= socket.recv_json() command = msg[\"command\"] if command == \"position\": data = robot.server_step(msg[command]) joint_state,", "server_connection, client_ssh_connection, client_connection SERVER_PORT = 7777 HOSTNAME = 'localhost' def send_array(socket, A, flags=0,", "= A.shape, ) socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy, track=track) def test_inmoov_gym(): while", "a numpy array with metadata\"\"\" md = dict( dtype = str(A.dtype), shape =", "import InmoovGymEnv from .inmoov_server import server_connection, client_ssh_connection, client_connection SERVER_PORT = 7777 HOSTNAME =", "sent\") elif command == \"action\": print(1) elif command == \"done\": print(2) elif command", "str(A.dtype), shape = A.shape, ) socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy, track=track) def", "k = input() try: # time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal = k.split() joint,", "test_inmoov_gym(): while True: k = input() try: # time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal", "from zmq import ssh import numpy as np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from", "if command == \"position\": data = robot.server_step(msg[command]) joint_state, reward, done, infos, px, end_position", "track=False): \"\"\"send a numpy array with metadata\"\"\" md = dict( dtype = str(A.dtype),", "from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server import server_connection, client_ssh_connection, client_connection SERVER_PORT = 7777", "msg = socket.recv_json() command = msg[\"command\"] if command == \"position\": data = robot.server_step(msg[command])", "= robot._inmoov.get_joints_pos() joints_num = len(init_pose) while True: msg = socket.recv_json() command = msg[\"command\"]", "dict( dtype = str(A.dtype), shape = A.shape, ) socket.send_json(md, 
flags|zmq.SNDMORE) return socket.send(A, flags,", "return socket.send(A, flags, copy=copy, track=track) def test_inmoov_gym(): while True: k = input() try:", "__name__ == \"__main__\": socket = server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos()", "positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num = len(init_pose) while True: msg = socket.recv_json() command", "flags=0, copy=True, track=False): \"\"\"send a numpy array with metadata\"\"\" md = dict( dtype", "robot._inmoov.get_joints_pos() joints_num = len(init_pose) while True: msg = socket.recv_json() command = msg[\"command\"] if", "= msg[\"command\"] if command == \"position\": data = robot.server_step(msg[command]) joint_state, reward, done, infos,", "# robot.step() if __name__ == \"__main__\": socket = server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True)", "environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server import server_connection, client_ssh_connection, client_connection SERVER_PORT = 7777 HOSTNAME", "== \"action\": print(1) elif command == \"done\": print(2) elif command == \"reset\": print(3)", "float(signal[1]) action[joint] = move robot.step(action) except: continue # robot.step() if __name__ == \"__main__\":", "HOSTNAME = 'localhost' def send_array(socket, A, flags=0, copy=True, track=False): \"\"\"send a numpy array", "np.array(done), flags=0, copy=True, track=False) send_array(socket, px, flags=0, copy=True, track=False) send_array(socket, end_position, flags=0, copy=True,", "7777 HOSTNAME = 'localhost' def send_array(socket, A, flags=0, copy=True, track=False): \"\"\"send a numpy", "def test_inmoov_gym(): while True: k = input() try: # time.sleep(0.5) action = np.zeros(shape=(joints_num,))", "copy=True, track=False) send_array(socket, np.array(done), flags=0, copy=True, track=False) send_array(socket, px, flags=0, copy=True, track=False) 
send_array(socket,", "input() try: # time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal = k.split() joint, move =", "zmq from zmq import ssh import numpy as np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv", "action = np.zeros(shape=(joints_num,)) signal = k.split() joint, move = int(signal[0]), float(signal[1]) action[joint] =", "send_array(socket, A, flags=0, copy=True, track=False): \"\"\"send a numpy array with metadata\"\"\" md =", "print(\"message sent\") elif command == \"action\": print(1) elif command == \"done\": print(2) elif", "zmq import ssh import numpy as np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server", "import numpy as np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server import server_connection, client_ssh_connection,", "numpy as np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server import server_connection, client_ssh_connection, client_connection", "InmoovGymEnv from .inmoov_server import server_connection, client_ssh_connection, client_connection SERVER_PORT = 7777 HOSTNAME = 'localhost'", "import server_connection, client_ssh_connection, client_connection SERVER_PORT = 7777 HOSTNAME = 'localhost' def send_array(socket, A,", "import zmq from zmq import ssh import numpy as np from environments.inmoov.inmoov_p2p_client_ready import", "robot.server_step(msg[command]) joint_state, reward, done, infos, px, end_position = data send_array(socket, joint_state, flags=0, copy=True,", "command = msg[\"command\"] if command == \"position\": data = robot.server_step(msg[command]) joint_state, reward, done,", "end_position = data send_array(socket, joint_state, flags=0, copy=True, track=False) send_array(socket, np.array(reward), flags=0, copy=True, track=False)", "= 7777 HOSTNAME = 'localhost' def send_array(socket, A, flags=0, copy=True, track=False): \"\"\"send a", "\"__main__\": socket = 
server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num =", "dtype = str(A.dtype), shape = A.shape, ) socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy,", "command == \"position\": data = robot.server_step(msg[command]) joint_state, reward, done, infos, px, end_position =", ") socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy, track=track) def test_inmoov_gym(): while True: k", "\"position\": data = robot.server_step(msg[command]) joint_state, reward, done, infos, px, end_position = data send_array(socket,", "= data send_array(socket, joint_state, flags=0, copy=True, track=False) send_array(socket, np.array(reward), flags=0, copy=True, track=False) send_array(socket,", "init_pose = robot._inmoov.get_joints_pos() joints_num = len(init_pose) while True: msg = socket.recv_json() command =", "try: # time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal = k.split() joint, move = int(signal[0]),", "data send_array(socket, joint_state, flags=0, copy=True, track=False) send_array(socket, np.array(reward), flags=0, copy=True, track=False) send_array(socket, np.array(done),", "client_ssh_connection, client_connection SERVER_PORT = 7777 HOSTNAME = 'localhost' def send_array(socket, A, flags=0, copy=True,", "track=False) send_array(socket, np.array(done), flags=0, copy=True, track=False) send_array(socket, px, flags=0, copy=True, track=False) send_array(socket, end_position,", "server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num = len(init_pose) while True:", "client_connection SERVER_PORT = 7777 HOSTNAME = 'localhost' def send_array(socket, A, flags=0, copy=True, track=False):", "track=False) send_array(socket, end_position, flags=0, copy=True, track=False) print(\"message sent\") elif command == \"action\": print(1)", "k.split() joint, move = 
int(signal[0]), float(signal[1]) action[joint] = move robot.step(action) except: continue #", "flags=0, copy=True, track=False) print(\"message sent\") elif command == \"action\": print(1) elif command ==", "\"\"\"send a numpy array with metadata\"\"\" md = dict( dtype = str(A.dtype), shape", "int(signal[0]), float(signal[1]) action[joint] = move robot.step(action) except: continue # robot.step() if __name__ ==", "np.array(reward), flags=0, copy=True, track=False) send_array(socket, np.array(done), flags=0, copy=True, track=False) send_array(socket, px, flags=0, copy=True,", "send_array(socket, joint_state, flags=0, copy=True, track=False) send_array(socket, np.array(reward), flags=0, copy=True, track=False) send_array(socket, np.array(done), flags=0,", "infos, px, end_position = data send_array(socket, joint_state, flags=0, copy=True, track=False) send_array(socket, np.array(reward), flags=0,", "== \"__main__\": socket = server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num", "array with metadata\"\"\" md = dict( dtype = str(A.dtype), shape = A.shape, )", "copy=True, track=False) print(\"message sent\") elif command == \"action\": print(1) elif command == \"done\":", "flags=0, copy=True, track=False) send_array(socket, px, flags=0, copy=True, track=False) send_array(socket, end_position, flags=0, copy=True, track=False)", "track=False) send_array(socket, np.array(reward), flags=0, copy=True, track=False) send_array(socket, np.array(done), flags=0, copy=True, track=False) send_array(socket, px,", "numpy array with metadata\"\"\" md = dict( dtype = str(A.dtype), shape = A.shape,", "= k.split() joint, move = int(signal[0]), float(signal[1]) action[joint] = move robot.step(action) except: continue", "except: continue # robot.step() if __name__ == \"__main__\": socket = server_connection() robot =", "send_array(socket, px, flags=0, copy=True, track=False) send_array(socket, end_position, 
flags=0, copy=True, track=False) print(\"message sent\") elif", "joints_num = len(init_pose) while True: msg = socket.recv_json() command = msg[\"command\"] if command", "continue # robot.step() if __name__ == \"__main__\": socket = server_connection() robot = InmoovGymEnv(debug_mode=True,", "time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal = k.split() joint, move = int(signal[0]), float(signal[1]) action[joint]", "flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy, track=track) def test_inmoov_gym(): while True: k = input()", "socket.send(A, flags, copy=copy, track=track) def test_inmoov_gym(): while True: k = input() try: #", "True: msg = socket.recv_json() command = msg[\"command\"] if command == \"position\": data =", "flags, copy=copy, track=track) def test_inmoov_gym(): while True: k = input() try: # time.sleep(0.5)", "= dict( dtype = str(A.dtype), shape = A.shape, ) socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A,", "msg[\"command\"] if command == \"position\": data = robot.server_step(msg[command]) joint_state, reward, done, infos, px,", "= server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num = len(init_pose) while", "with metadata\"\"\" md = dict( dtype = str(A.dtype), shape = A.shape, ) socket.send_json(md,", "robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num = len(init_pose) while True: msg", "'localhost' def send_array(socket, A, flags=0, copy=True, track=False): \"\"\"send a numpy array with metadata\"\"\"", "True: k = input() try: # time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal = k.split()", "while True: msg = socket.recv_json() command = msg[\"command\"] if command == \"position\": data", "= len(init_pose) while True: msg = socket.recv_json() command = msg[\"command\"] if command ==", "send_array(socket, np.array(reward), flags=0, copy=True, track=False) 
send_array(socket, np.array(done), flags=0, copy=True, track=False) send_array(socket, px, flags=0,", "# time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal = k.split() joint, move = int(signal[0]), float(signal[1])", "copy=True, track=False) send_array(socket, end_position, flags=0, copy=True, track=False) print(\"message sent\") elif command == \"action\":", "socket.recv_json() command = msg[\"command\"] if command == \"position\": data = robot.server_step(msg[command]) joint_state, reward,", "send_array(socket, end_position, flags=0, copy=True, track=False) print(\"message sent\") elif command == \"action\": print(1) elif", "= robot.server_step(msg[command]) joint_state, reward, done, infos, px, end_position = data send_array(socket, joint_state, flags=0,", "from .inmoov_server import server_connection, client_ssh_connection, client_connection SERVER_PORT = 7777 HOSTNAME = 'localhost' def", "= 'localhost' def send_array(socket, A, flags=0, copy=True, track=False): \"\"\"send a numpy array with", ".inmoov_server import server_connection, client_ssh_connection, client_connection SERVER_PORT = 7777 HOSTNAME = 'localhost' def send_array(socket,", "elif command == \"action\": print(1) elif command == \"done\": print(2) elif command ==", "flags=0, copy=True, track=False) send_array(socket, np.array(done), flags=0, copy=True, track=False) send_array(socket, px, flags=0, copy=True, track=False)", "socket.send_json(md, flags|zmq.SNDMORE) return socket.send(A, flags, copy=copy, track=track) def test_inmoov_gym(): while True: k =", "= move robot.step(action) except: continue # robot.step() if __name__ == \"__main__\": socket =", "= InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num = len(init_pose) while True: msg =", "track=track) def test_inmoov_gym(): while True: k = input() try: # time.sleep(0.5) action =", "as np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server 
import server_connection, client_ssh_connection, client_connection SERVER_PORT", "A, flags=0, copy=True, track=False): \"\"\"send a numpy array with metadata\"\"\" md = dict(", "joint_state, flags=0, copy=True, track=False) send_array(socket, np.array(reward), flags=0, copy=True, track=False) send_array(socket, np.array(done), flags=0, copy=True,", "if __name__ == \"__main__\": socket = server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose =", "np.zeros(shape=(joints_num,)) signal = k.split() joint, move = int(signal[0]), float(signal[1]) action[joint] = move robot.step(action)", "move = int(signal[0]), float(signal[1]) action[joint] = move robot.step(action) except: continue # robot.step() if", "np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server import server_connection, client_ssh_connection, client_connection SERVER_PORT =", "copy=True, track=False) send_array(socket, np.array(reward), flags=0, copy=True, track=False) send_array(socket, np.array(done), flags=0, copy=True, track=False) send_array(socket,", "ssh import numpy as np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server import server_connection,", "while True: k = input() try: # time.sleep(0.5) action = np.zeros(shape=(joints_num,)) signal =", "socket = server_connection() robot = InmoovGymEnv(debug_mode=True, positional_control=True) init_pose = robot._inmoov.get_joints_pos() joints_num = len(init_pose)", "import ssh import numpy as np from environments.inmoov.inmoov_p2p_client_ready import InmoovGymEnv from .inmoov_server import", "flags=0, copy=True, track=False) send_array(socket, np.array(reward), flags=0, copy=True, track=False) send_array(socket, np.array(done), flags=0, copy=True, track=False)", "joint_state, reward, done, infos, px, end_position = data send_array(socket, joint_state, flags=0, copy=True, track=False)", "signal = k.split() joint, move = int(signal[0]), 
float(signal[1]) action[joint] = move robot.step(action) except:" ]
[ "method epsilon = 1e-15 delta = 1.0 while delta>epsilon: p0 = ones(N,float) p1", "ones(N,float) p1 = copy(x) for k in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp =", "p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x -= dx delta", "-= dx delta = max(abs(dx)) # Calculate the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return", "<filename>PHY407/gaussxw.py from pylab import * def gaussxw(N): # Initial approximation to roots of", "x = cos(pi*a+1/(8*N*N*tan(a))) # Find roots using Newton's method epsilon = 1e-15 delta", "dx delta = max(abs(dx)) # Calculate the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return x,w", "approximation to roots of the Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a)))", "delta = max(abs(dx)) # Calculate the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return x,w def", "= cos(pi*a+1/(8*N*N*tan(a))) # Find roots using Newton's method epsilon = 1e-15 delta =", "* def gaussxw(N): # Initial approximation to roots of the Legendre polynomial a", "to roots of the Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) #", "cos(pi*a+1/(8*N*N*tan(a))) # Find roots using Newton's method epsilon = 1e-15 delta = 1.0", "dx = p1/dp x -= dx delta = max(abs(dx)) # Calculate the weights", "from pylab import * def gaussxw(N): # Initial approximation to roots of the", "1.0 while delta>epsilon: p0 = ones(N,float) p1 = copy(x) for k in range(1,N):", "= 1e-15 delta = 1.0 while delta>epsilon: p0 = ones(N,float) p1 = copy(x)", "while delta>epsilon: p0 = ones(N,float) p1 = copy(x) for k in range(1,N): p0,p1", "import * def gaussxw(N): # Initial approximation to roots of the Legendre polynomial", "p0 = ones(N,float) p1 = copy(x) for k in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1)", "def gaussxw(N): # Initial approximation to roots of the Legendre polynomial a =", "of the Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) # Find 
roots", "= linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) # Find roots using Newton's method epsilon =", "# Find roots using Newton's method epsilon = 1e-15 delta = 1.0 while", "using Newton's method epsilon = 1e-15 delta = 1.0 while delta>epsilon: p0 =", "delta>epsilon: p0 = ones(N,float) p1 = copy(x) for k in range(1,N): p0,p1 =", "copy(x) for k in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x) dx =", "gaussxw(N): # Initial approximation to roots of the Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2)", "roots of the Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) # Find", "range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x -= dx", "roots using Newton's method epsilon = 1e-15 delta = 1.0 while delta>epsilon: p0", "Newton's method epsilon = 1e-15 delta = 1.0 while delta>epsilon: p0 = ones(N,float)", "= max(abs(dx)) # Calculate the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return x,w def gaussxwab(N,a,b):", "Find roots using Newton's method epsilon = 1e-15 delta = 1.0 while delta>epsilon:", "p1 = copy(x) for k in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x)", "pylab import * def gaussxw(N): # Initial approximation to roots of the Legendre", "(N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x -= dx delta = max(abs(dx)) # Calculate the", "weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return x,w def gaussxwab(N,a,b): x,w = gaussxw(N) return 0.5*(b-a)*x+0.5*(b+a),0.5*(b-a)*w", "max(abs(dx)) # Calculate the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return x,w def gaussxwab(N,a,b): x,w", "= 1.0 while delta>epsilon: p0 = ones(N,float) p1 = copy(x) for k in", "= copy(x) for k in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x) dx", "Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) # Find roots using Newton's", "in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp 
= (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x -=", "p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x -= dx delta = max(abs(dx))", "the Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) # Find roots using", "x -= dx delta = max(abs(dx)) # Calculate the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp)", "a = linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) # Find roots using Newton's method epsilon", "1e-15 delta = 1.0 while delta>epsilon: p0 = ones(N,float) p1 = copy(x) for", "the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return x,w def gaussxwab(N,a,b): x,w = gaussxw(N) return", "= p1/dp x -= dx delta = max(abs(dx)) # Calculate the weights w", "delta = 1.0 while delta>epsilon: p0 = ones(N,float) p1 = copy(x) for k", "Calculate the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return x,w def gaussxwab(N,a,b): x,w = gaussxw(N)", "= ones(N,float) p1 = copy(x) for k in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp", "k in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x", "polynomial a = linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) # Find roots using Newton's method", "dp = (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x -= dx delta = max(abs(dx)) #", "# Calculate the weights w = 2*(N+1)*(N+1)/(N*N*(1-x*x)*dp*dp) return x,w def gaussxwab(N,a,b): x,w =", "= p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x -= dx delta =", "p1/dp x -= dx delta = max(abs(dx)) # Calculate the weights w =", "for k in range(1,N): p0,p1 = p1,((2*k+1)*x*p1-k*p0)/(k+1) dp = (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp", "epsilon = 1e-15 delta = 1.0 while delta>epsilon: p0 = ones(N,float) p1 =", "= (N+1)*(p0-x*p1)/(1-x*x) dx = p1/dp x -= dx delta = max(abs(dx)) # Calculate", "Initial approximation to roots of the Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2) x =", "linspace(3,4*N-1,N)/(4*N+2) x = cos(pi*a+1/(8*N*N*tan(a))) # Find roots using Newton's method epsilon = 
1e-15", "# Initial approximation to roots of the Legendre polynomial a = linspace(3,4*N-1,N)/(4*N+2) x" ]
[ "the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf import settings class", "import pymongo from scrapy.conf import settings class BaiduPipeline(object): def __init__(self): host = settings['MONGODB_HOST']", "# Define your item pipelines here # # Don't forget to add your", "http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf import settings class BaiduPipeline(object): def __init__(self): host =", "port = settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port) db = client[db_name]", "your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf", "import settings class BaiduPipeline(object): def __init__(self): host = settings['MONGODB_HOST'] port = settings['MONGODB_PORT'] db_name", "# # Don't forget to add your pipeline to the ITEM_PIPELINES setting #", "add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from", "Define your item pipelines here # # Don't forget to add your pipeline", "your item pipelines here # # Don't forget to add your pipeline to", "# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf import settings class BaiduPipeline(object): def __init__(self):", "-*- coding: utf-8 -*- # Define your item pipelines here # # Don't", "BaiduPipeline(object): def __init__(self): host = settings['MONGODB_HOST'] port = settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client", "pymongo.MongoClient(host=host, port=port) db = client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def process_item(self, item, spider): person_info", "settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port) db = client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def 
process_item(self,", "# -*- coding: utf-8 -*- # Define your item pipelines here # #", "client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def process_item(self, item, spider): person_info = dict(item) self.post.insert(person_info) return", "utf-8 -*- # Define your item pipelines here # # Don't forget to", "pymongo from scrapy.conf import settings class BaiduPipeline(object): def __init__(self): host = settings['MONGODB_HOST'] port", "pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES", "def __init__(self): host = settings['MONGODB_HOST'] port = settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client =", "from scrapy.conf import settings class BaiduPipeline(object): def __init__(self): host = settings['MONGODB_HOST'] port =", "settings['MONGODB_HOST'] port = settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port) db =", "ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf import settings class BaiduPipeline(object):", "item pipelines here # # Don't forget to add your pipeline to the", "db = client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def process_item(self, item, spider): person_info = dict(item)", "= client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def process_item(self, item, spider): person_info = dict(item) self.post.insert(person_info)", "here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting", "# Don't forget to add your pipeline to the ITEM_PIPELINES setting # See:", "__init__(self): host = settings['MONGODB_HOST'] port = settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host,", "to add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo", "class BaiduPipeline(object): def __init__(self): host = settings['MONGODB_HOST'] port = 
settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME']", "host = settings['MONGODB_HOST'] port = settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port)", "client = pymongo.MongoClient(host=host, port=port) db = client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def process_item(self, item,", "-*- # Define your item pipelines here # # Don't forget to add", "Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html", "See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf import settings class BaiduPipeline(object): def __init__(self): host", "db_name = settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port) db = client[db_name] self.post = db[settings['MONGODB_DOCNAME']]", "coding: utf-8 -*- # Define your item pipelines here # # Don't forget", "port=port) db = client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def process_item(self, item, spider): person_info =", "scrapy.conf import settings class BaiduPipeline(object): def __init__(self): host = settings['MONGODB_HOST'] port = settings['MONGODB_PORT']", "= settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port) db = client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def", "= pymongo.MongoClient(host=host, port=port) db = client[db_name] self.post = db[settings['MONGODB_DOCNAME']] def process_item(self, item, spider):", "to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf import settings", "self.post = db[settings['MONGODB_DOCNAME']] def process_item(self, item, spider): person_info = dict(item) self.post.insert(person_info) return item", "settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port) db = client[db_name] self.post =", "setting # See: 
http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf import settings class BaiduPipeline(object): def", "settings class BaiduPipeline(object): def __init__(self): host = settings['MONGODB_HOST'] port = settings['MONGODB_PORT'] db_name =", "pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import pymongo from scrapy.conf import", "= settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port) db = client[db_name] self.post", "= settings['MONGODB_HOST'] port = settings['MONGODB_PORT'] db_name = settings['MONGODB_DBNAME'] client = pymongo.MongoClient(host=host, port=port) db", "forget to add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html import" ]
[ "= os.read(f, l) else: s = os.read(f, l).decode() return s def readw(self, want,", "s = os.read(f, l).decode() return s def readw(self, want, tries=100): i = 0", "cmds): p = Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major == 2: return p.stdout.read().strip() else: return", "\"\" for f, e in es: if e & select.EPOLLIN: if sys.version_info.major ==", "tries: line = self.read() if want in line: return r + line r", "CexecCmd(object): def __init__(self): pass def cmd(self, cmds): p = Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major", "liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19: ------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan' import", "def cmd(self, cmds): p = Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major == 2: return p.stdout.read().strip()", "2022/3/19: ------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan' import os import sys import shlex from", "def __init__(self): pass def cmd(self, cmds): p = Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major ==", "if want in line: return r + line r += line i +=", "self).__init__() self.daemon = True # thread dies with the program self.__pipe = open(f,", "return s def readw(self, want, tries=100): i = 0 r = \"\" while", "2: return p.stdout.read().strip() else: return p.stdout.read().decode().strip() def system(self, cmds): cmds = cmds.replace('\\0', '').strip()", "import sys import shlex from subprocess import PIPE, Popen from threading import Thread", "return r + line r += line i += 1 raise Exception(\"get want", "IOError: return -1 def writeLine(self, cmd): self.write(cmd + \"\\n\") def read(self, tmout=0.2, l=16384):", "= \"\" while i < tries: line = self.read() if want in line:", "while i < tries: line = self.read() if want in line: return r", "def terminate(self): self.__loop = False self.join(1) class CexecCmd(object): def __init__(self): 
pass def cmd(self,", "utf-8 -*- \"\"\" ------------------------------------------------- File Name: execCmd Description : Author : liaozhaoyan date:", "self.__loop = True self.start() def newCb(self, func): self.__callBack = func def run(self): while", "= self.__e.poll(tmout) if not es: return \"\" for f, e in es: if", "thread dies with the program self.__pipe = open(f, 'r') self.__loop = True self.start()", "is not exist.\" % f) self.__callBack = func super(CasyncPipe, self).__init__() self.daemon = True", "f, e in es: if e & select.EPOLLIN: if sys.version_info.major == 2: s", "def readw(self, want, tries=100): i = 0 r = \"\" while i <", "+= line i += 1 raise Exception(\"get want args %s overtimes\" % want)", "import PIPE, Popen from threading import Thread import select from .surfException import FileNotExistException", "func def run(self): while self.__loop: line = self.__pipe.readline().strip() self.__callBack(line) def terminate(self): self.__loop =", "select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): self.__p.kill() def write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except", "es = self.__e.poll(tmout) if not es: return \"\" for f, e in es:", "func): self.__callBack = func def run(self): while self.__loop: line = self.__pipe.readline().strip() self.__callBack(line) def", "% f) self.__callBack = func super(CasyncPipe, self).__init__() self.daemon = True # thread dies", "overtimes\" % want) def terminate(self): self.__p.terminate() return self.__p.wait() if __name__ == \"__main__\": pass", "import FileNotExistException ON_POSIX = 'posix' in sys.builtin_module_names class CasyncPipe(Thread): def __init__(self, f, func):", "= select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): self.__p.kill() def write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush()", "self.__pipe = open(f, 'r') self.__loop = True self.start() def 
newCb(self, func): self.__callBack =", "def writeLine(self, cmd): self.write(cmd + \"\\n\") def read(self, tmout=0.2, l=16384): while True: es", "self.__loop: line = self.__pipe.readline().strip() self.__callBack(line) def terminate(self): self.__loop = False self.join(1) class CexecCmd(object):", "sys.version_info.major == 2: s = os.read(f, l) else: s = os.read(f, l).decode() return", "readw(self, want, tries=100): i = 0 r = \"\" while i < tries:", "= 'liaozhaoyan' import os import sys import shlex from subprocess import PIPE, Popen", "True self.start() def newCb(self, func): self.__callBack = func def run(self): while self.__loop: line", "if sys.version_info.major == 2: return p.stdout.read().strip() else: return p.stdout.read().decode().strip() def system(self, cmds): cmds", "program self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def", "Popen from threading import Thread import select from .surfException import FileNotExistException ON_POSIX =", "line = self.__pipe.readline().strip() self.__callBack(line) def terminate(self): self.__loop = False self.join(1) class CexecCmd(object): def", "cmd): super(CasyncCmdQue, self).__init__() self.daemon = True # thread dies with the program self.__p", "for f, e in es: if e & select.EPOLLIN: if sys.version_info.major == 2:", "os.read(f, l).decode() return s def readw(self, want, tries=100): i = 0 r =", "tmout=0.2, l=16384): while True: es = self.__e.poll(tmout) if not es: return \"\" for", "\"\" while i < tries: line = self.read() if want in line: return", "p.stdout.read().strip() else: return p.stdout.read().decode().strip() def system(self, cmds): cmds = cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192)", "Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): 
self.__p.kill() def", "in line: return r + line r += line i += 1 raise", "not exist.\" % f) self.__callBack = func super(CasyncPipe, self).__init__() self.daemon = True #", "p.stdout.read().decode().strip() def system(self, cmds): cmds = cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object): def", "line r += line i += 1 raise Exception(\"get want args %s overtimes\"", "= 'posix' in sys.builtin_module_names class CasyncPipe(Thread): def __init__(self, f, func): if not os.path.exists(f):", "class CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon = True # thread dies", "cmd(self, cmds): p = Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major == 2: return p.stdout.read().strip() else:", "\"\\n\") def read(self, tmout=0.2, l=16384): while True: es = self.__e.poll(tmout) if not es:", "line = self.read() if want in line: return r + line r +=", "l).decode() return s def readw(self, want, tries=100): i = 0 r = \"\"", "run(self): while self.__loop: line = self.__pipe.readline().strip() self.__callBack(line) def terminate(self): self.__loop = False self.join(1)", "cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return -1 def writeLine(self, cmd): self.write(cmd +", "coding: utf-8 -*- \"\"\" ------------------------------------------------- File Name: execCmd Description : Author : liaozhaoyan", "Exception(\"get want args %s overtimes\" % want) def terminate(self): self.__p.terminate() return self.__p.wait() if", "Thread import select from .surfException import FileNotExistException ON_POSIX = 'posix' in sys.builtin_module_names class", "in es: if e & select.EPOLLIN: if sys.version_info.major == 2: s = os.read(f,", "line i += 1 raise Exception(\"get want args %s overtimes\" % want) def", "File Name: execCmd Description : Author : liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change Activity:", "Activity: 2022/3/19: 
------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan' import os import sys import shlex", ".surfException import FileNotExistException ON_POSIX = 'posix' in sys.builtin_module_names class CasyncPipe(Thread): def __init__(self, f,", "sys.builtin_module_names class CasyncPipe(Thread): def __init__(self, f, func): if not os.path.exists(f): FileNotExistException(\"%s is not", "def newCb(self, func): self.__callBack = func def run(self): while self.__loop: line = self.__pipe.readline().strip()", "__init__(self, f, func): if not os.path.exists(f): FileNotExistException(\"%s is not exist.\" % f) self.__callBack", "from subprocess import PIPE, Popen from threading import Thread import select from .surfException", "self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): self.__p.kill() def write(self, cmd): try: self.__p.stdin.write(cmd.encode())", "------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan' import os import sys import shlex from subprocess", "the program self.__pipe = open(f, 'r') self.__loop = True self.start() def newCb(self, func):", "cmds = cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue, self).__init__()", "def system(self, cmds): cmds = cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self,", "+= 1 raise Exception(\"get want args %s overtimes\" % want) def terminate(self): self.__p.terminate()", "%s overtimes\" % want) def terminate(self): self.__p.terminate() return self.__p.wait() if __name__ == \"__main__\":", "thread dies with the program self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e =", "r = \"\" while i < tries: line = self.read() if want in", "p = Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major == 2: return p.stdout.read().strip() else: 
return p.stdout.read().decode().strip()", "return p.stdout.read().strip() else: return p.stdout.read().decode().strip() def system(self, cmds): cmds = cmds.replace('\\0', '').strip() return", "os.path.exists(f): FileNotExistException(\"%s is not exist.\" % f) self.__callBack = func super(CasyncPipe, self).__init__() self.daemon", "with the program self.__pipe = open(f, 'r') self.__loop = True self.start() def newCb(self,", "= Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): self.__p.kill()", "__del__(self): self.__p.kill() def write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return -1 def", "__init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon = True # thread dies with the program", "+ line r += line i += 1 raise Exception(\"get want args %s", "want args %s overtimes\" % want) def terminate(self): self.__p.terminate() return self.__p.wait() if __name__", "= True # thread dies with the program self.__pipe = open(f, 'r') self.__loop", "newCb(self, func): self.__callBack = func def run(self): while self.__loop: line = self.__pipe.readline().strip() self.__callBack(line)", "read(self, tmout=0.2, l=16384): while True: es = self.__e.poll(tmout) if not es: return \"\"", "if e & select.EPOLLIN: if sys.version_info.major == 2: s = os.read(f, l) else:", "import shlex from subprocess import PIPE, Popen from threading import Thread import select", "self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return -1 def writeLine(self, cmd): self.write(cmd + \"\\n\") def", "< tries: line = self.read() if want in line: return r + line", "------------------------------------------------- File Name: execCmd Description : Author : liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change", "open(f, 'r') self.__loop = True self.start() def newCb(self, 
func): self.__callBack = func def", "except IOError: return -1 def writeLine(self, cmd): self.write(cmd + \"\\n\") def read(self, tmout=0.2,", "= func super(CasyncPipe, self).__init__() self.daemon = True # thread dies with the program", "s = os.read(f, l) else: s = os.read(f, l).decode() return s def readw(self,", "True # thread dies with the program self.__pipe = open(f, 'r') self.__loop =", "system(self, cmds): cmds = cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self, cmd):", "# thread dies with the program self.__pipe = open(f, 'r') self.__loop = True", "stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): self.__p.kill() def write(self, cmd):", "self.__pipe.readline().strip() self.__callBack(line) def terminate(self): self.__loop = False self.join(1) class CexecCmd(object): def __init__(self): pass", "0 r = \"\" while i < tries: line = self.read() if want", "dies with the program self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll()", "execCmd Description : Author : liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19: -------------------------------------------------", "self.__callBack = func super(CasyncPipe, self).__init__() self.daemon = True # thread dies with the", "import select from .surfException import FileNotExistException ON_POSIX = 'posix' in sys.builtin_module_names class CasyncPipe(Thread):", "i < tries: line = self.read() if want in line: return r +", "os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon = True # thread", "r += line i += 1 raise Exception(\"get want args %s overtimes\" %", "\"\"\" __author__ = 'liaozhaoyan' import os import sys import shlex from subprocess import", "in sys.builtin_module_names class 
CasyncPipe(Thread): def __init__(self, f, func): if not os.path.exists(f): FileNotExistException(\"%s is", "self.__p.kill() def write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return -1 def writeLine(self,", "try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return -1 def writeLine(self, cmd): self.write(cmd + \"\\n\")", "class CasyncPipe(Thread): def __init__(self, f, func): if not os.path.exists(f): FileNotExistException(\"%s is not exist.\"", "select.EPOLLIN) def __del__(self): self.__p.kill() def write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return", "PIPE, Popen from threading import Thread import select from .surfException import FileNotExistException ON_POSIX", "l) else: s = os.read(f, l).decode() return s def readw(self, want, tries=100): i", "threading import Thread import select from .surfException import FileNotExistException ON_POSIX = 'posix' in", "2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19: ------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan' import os import", "terminate(self): self.__loop = False self.join(1) class CexecCmd(object): def __init__(self): pass def cmd(self, cmds):", "not es: return \"\" for f, e in es: if e & select.EPOLLIN:", "i = 0 r = \"\" while i < tries: line = self.read()", "\"\"\" ------------------------------------------------- File Name: execCmd Description : Author : liaozhaoyan date: 2022/3/19 -------------------------------------------------", "stdout=PIPE) if sys.version_info.major == 2: return p.stdout.read().strip() else: return p.stdout.read().decode().strip() def system(self, cmds):", "= cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon", "e in es: if e & select.EPOLLIN: if sys.version_info.major == 2: 
s =", "True # thread dies with the program self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX)", "self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self):", "= open(f, 'r') self.__loop = True self.start() def newCb(self, func): self.__callBack = func", "if not es: return \"\" for f, e in es: if e &", "Description : Author : liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19: ------------------------------------------------- \"\"\"", "os import sys import shlex from subprocess import PIPE, Popen from threading import", "super(CasyncCmdQue, self).__init__() self.daemon = True # thread dies with the program self.__p =", "if sys.version_info.major == 2: s = os.read(f, l) else: s = os.read(f, l).decode()", "------------------------------------------------- Change Activity: 2022/3/19: ------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan' import os import sys", "return os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon = True #", "not os.path.exists(f): FileNotExistException(\"%s is not exist.\" % f) self.__callBack = func super(CasyncPipe, self).__init__()", "Author : liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19: ------------------------------------------------- \"\"\" __author__ =", "func): if not os.path.exists(f): FileNotExistException(\"%s is not exist.\" % f) self.__callBack = func", "write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return -1 def writeLine(self, cmd): self.write(cmd", "self.daemon = True # thread dies with the program self.__p = Popen(shlex.split(cmd), stdout=PIPE,", "CasyncPipe(Thread): def __init__(self, f, func): if not 
os.path.exists(f): FileNotExistException(\"%s is not exist.\" %", "CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon = True # thread dies with", "False self.join(1) class CexecCmd(object): def __init__(self): pass def cmd(self, cmds): p = Popen(shlex.split(cmds),", "e & select.EPOLLIN: if sys.version_info.major == 2: s = os.read(f, l) else: s", "line: return r + line r += line i += 1 raise Exception(\"get", "= Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major == 2: return p.stdout.read().strip() else: return p.stdout.read().decode().strip() def", "1 raise Exception(\"get want args %s overtimes\" % want) def terminate(self): self.__p.terminate() return", "import os import sys import shlex from subprocess import PIPE, Popen from threading", "from .surfException import FileNotExistException ON_POSIX = 'posix' in sys.builtin_module_names class CasyncPipe(Thread): def __init__(self,", "self.__callBack = func def run(self): while self.__loop: line = self.__pipe.readline().strip() self.__callBack(line) def terminate(self):", "2: s = os.read(f, l) else: s = os.read(f, l).decode() return s def", "cmds): cmds = cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue,", "self.join(1) class CexecCmd(object): def __init__(self): pass def cmd(self, cmds): p = Popen(shlex.split(cmds), stdout=PIPE)", "self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): self.__p.kill() def write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError:", "def run(self): while self.__loop: line = self.__pipe.readline().strip() self.__callBack(line) def terminate(self): self.__loop = False", "# -*- coding: utf-8 -*- \"\"\" ------------------------------------------------- File Name: execCmd Description : Author", "self.start() def newCb(self, func): self.__callBack = func def run(self): while 
self.__loop: line =", "args %s overtimes\" % want) def terminate(self): self.__p.terminate() return self.__p.wait() if __name__ ==", "self.daemon = True # thread dies with the program self.__pipe = open(f, 'r')", "i += 1 raise Exception(\"get want args %s overtimes\" % want) def terminate(self):", "__author__ = 'liaozhaoyan' import os import sys import shlex from subprocess import PIPE,", ": Author : liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19: ------------------------------------------------- \"\"\" __author__", "= self.read() if want in line: return r + line r += line", "return \"\" for f, e in es: if e & select.EPOLLIN: if sys.version_info.major", "s def readw(self, want, tries=100): i = 0 r = \"\" while i", "= os.read(f, l).decode() return s def readw(self, want, tries=100): i = 0 r", "the program self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN)", "shlex from subprocess import PIPE, Popen from threading import Thread import select from", "import Thread import select from .surfException import FileNotExistException ON_POSIX = 'posix' in sys.builtin_module_names", "pass def cmd(self, cmds): p = Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major == 2: return", "cmd): self.write(cmd + \"\\n\") def read(self, tmout=0.2, l=16384): while True: es = self.__e.poll(tmout)", "+ \"\\n\") def read(self, tmout=0.2, l=16384): while True: es = self.__e.poll(tmout) if not", "# thread dies with the program self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e", "es: if e & select.EPOLLIN: if sys.version_info.major == 2: s = os.read(f, l)", "= 0 r = \"\" while i < tries: line = self.read() if", "return p.stdout.read().decode().strip() def system(self, cmds): cmds = cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object):", 
"while self.__loop: line = self.__pipe.readline().strip() self.__callBack(line) def terminate(self): self.__loop = False self.join(1) class", "'posix' in sys.builtin_module_names class CasyncPipe(Thread): def __init__(self, f, func): if not os.path.exists(f): FileNotExistException(\"%s", "with the program self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(),", "stdout=PIPE, stdin=PIPE, close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): self.__p.kill() def write(self,", "def __del__(self): self.__p.kill() def write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return -1", "-*- \"\"\" ------------------------------------------------- File Name: execCmd Description : Author : liaozhaoyan date: 2022/3/19", "def __init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon = True # thread dies with the", "Name: execCmd Description : Author : liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19:", "FileNotExistException(\"%s is not exist.\" % f) self.__callBack = func super(CasyncPipe, self).__init__() self.daemon =", "dies with the program self.__pipe = open(f, 'r') self.__loop = True self.start() def", "'r') self.__loop = True self.start() def newCb(self, func): self.__callBack = func def run(self):", "subprocess import PIPE, Popen from threading import Thread import select from .surfException import", "while True: es = self.__e.poll(tmout) if not es: return \"\" for f, e", "class CexecCmd(object): def __init__(self): pass def cmd(self, cmds): p = Popen(shlex.split(cmds), stdout=PIPE) if", "self.__p.stdin.flush() except IOError: return -1 def writeLine(self, cmd): self.write(cmd + \"\\n\") def read(self,", "= func def run(self): while self.__loop: line = self.__pipe.readline().strip() 
self.__callBack(line) def terminate(self): self.__loop", "def __init__(self, f, func): if not os.path.exists(f): FileNotExistException(\"%s is not exist.\" % f)", "ON_POSIX = 'posix' in sys.builtin_module_names class CasyncPipe(Thread): def __init__(self, f, func): if not", "FileNotExistException ON_POSIX = 'posix' in sys.builtin_module_names class CasyncPipe(Thread): def __init__(self, f, func): if", "self.__callBack(line) def terminate(self): self.__loop = False self.join(1) class CexecCmd(object): def __init__(self): pass def", "sys import shlex from subprocess import PIPE, Popen from threading import Thread import", "date: 2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19: ------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan' import os", "exist.\" % f) self.__callBack = func super(CasyncPipe, self).__init__() self.daemon = True # thread", "es: return \"\" for f, e in es: if e & select.EPOLLIN: if", "select from .surfException import FileNotExistException ON_POSIX = 'posix' in sys.builtin_module_names class CasyncPipe(Thread): def", "self.__e.poll(tmout) if not es: return \"\" for f, e in es: if e", "& select.EPOLLIN: if sys.version_info.major == 2: s = os.read(f, l) else: s =", "Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major == 2: return p.stdout.read().strip() else: return p.stdout.read().decode().strip() def system(self,", "else: return p.stdout.read().decode().strip() def system(self, cmds): cmds = cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class", "sys.version_info.major == 2: return p.stdout.read().strip() else: return p.stdout.read().decode().strip() def system(self, cmds): cmds =", "def read(self, tmout=0.2, l=16384): while True: es = self.__e.poll(tmout) if not es: return", "-*- coding: utf-8 -*- \"\"\" ------------------------------------------------- File Name: execCmd Description : Author :", "self.__loop = False self.join(1) class 
CexecCmd(object): def __init__(self): pass def cmd(self, cmds): p", "-1 def writeLine(self, cmd): self.write(cmd + \"\\n\") def read(self, tmout=0.2, l=16384): while True:", "'').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon = True", "tries=100): i = 0 r = \"\" while i < tries: line =", ": liaozhaoyan date: 2022/3/19 ------------------------------------------------- Change Activity: 2022/3/19: ------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan'", "True: es = self.__e.poll(tmout) if not es: return \"\" for f, e in", "Change Activity: 2022/3/19: ------------------------------------------------- \"\"\" __author__ = 'liaozhaoyan' import os import sys import", "f, func): if not os.path.exists(f): FileNotExistException(\"%s is not exist.\" % f) self.__callBack =", "l=16384): while True: es = self.__e.poll(tmout) if not es: return \"\" for f,", "else: s = os.read(f, l).decode() return s def readw(self, want, tries=100): i =", "raise Exception(\"get want args %s overtimes\" % want) def terminate(self): self.__p.terminate() return self.__p.wait()", "= False self.join(1) class CexecCmd(object): def __init__(self): pass def cmd(self, cmds): p =", "from threading import Thread import select from .surfException import FileNotExistException ON_POSIX = 'posix'", "= True self.start() def newCb(self, func): self.__callBack = func def run(self): while self.__loop:", "close_fds=ON_POSIX) self.__e = select.epoll() self.__e.register(self.__p.stdout.fileno(), select.EPOLLIN) def __del__(self): self.__p.kill() def write(self, cmd): try:", "= self.__pipe.readline().strip() self.__callBack(line) def terminate(self): self.__loop = False self.join(1) class CexecCmd(object): def __init__(self):", "self.write(cmd + \"\\n\") def read(self, tmout=0.2, l=16384): while True: es = self.__e.poll(tmout) if", "__init__(self): pass def cmd(self, cmds): p = 
Popen(shlex.split(cmds), stdout=PIPE) if sys.version_info.major == 2:", "program self.__pipe = open(f, 'r') self.__loop = True self.start() def newCb(self, func): self.__callBack", "return -1 def writeLine(self, cmd): self.write(cmd + \"\\n\") def read(self, tmout=0.2, l=16384): while", "self.read() if want in line: return r + line r += line i", "r + line r += line i += 1 raise Exception(\"get want args", "def write(self, cmd): try: self.__p.stdin.write(cmd.encode()) self.__p.stdin.flush() except IOError: return -1 def writeLine(self, cmd):", "if not os.path.exists(f): FileNotExistException(\"%s is not exist.\" % f) self.__callBack = func super(CasyncPipe,", "= True # thread dies with the program self.__p = Popen(shlex.split(cmd), stdout=PIPE, stdin=PIPE,", "f) self.__callBack = func super(CasyncPipe, self).__init__() self.daemon = True # thread dies with", "== 2: s = os.read(f, l) else: s = os.read(f, l).decode() return s", "self).__init__() self.daemon = True # thread dies with the program self.__p = Popen(shlex.split(cmd),", "== 2: return p.stdout.read().strip() else: return p.stdout.read().decode().strip() def system(self, cmds): cmds = cmds.replace('\\0',", "cmds.replace('\\0', '').strip() return os.popen(cmds).read(8192) class CasyncCmdQue(object): def __init__(self, cmd): super(CasyncCmdQue, self).__init__() self.daemon =", "func super(CasyncPipe, self).__init__() self.daemon = True # thread dies with the program self.__pipe", "writeLine(self, cmd): self.write(cmd + \"\\n\") def read(self, tmout=0.2, l=16384): while True: es =", "'liaozhaoyan' import os import sys import shlex from subprocess import PIPE, Popen from", "os.read(f, l) else: s = os.read(f, l).decode() return s def readw(self, want, tries=100):", "select.EPOLLIN: if sys.version_info.major == 2: s = os.read(f, l) else: s = os.read(f,", "super(CasyncPipe, self).__init__() self.daemon = True # thread dies with the program self.__pipe =", "want, tries=100): i = 0 r = \"\" while i < tries: line", 
"want in line: return r + line r += line i += 1" ]
[ "OneOfValidator(Validator): def __init__(self, *options): self.options = set(options) def validate(self, value): if value not", "self.options = set(options) def validate(self, value): if value not in self.options: raise ValueError(f'Expected", ".Validator import Validator class OneOfValidator(Validator): def __init__(self, *options): self.options = set(options) def validate(self,", "set(options) def validate(self, value): if value not in self.options: raise ValueError(f'Expected {value!r} to", "<reponame>meguia/virtualroom from .Validator import Validator class OneOfValidator(Validator): def __init__(self, *options): self.options = set(options)", "= set(options) def validate(self, value): if value not in self.options: raise ValueError(f'Expected {value!r}", "__init__(self, *options): self.options = set(options) def validate(self, value): if value not in self.options:", "def __init__(self, *options): self.options = set(options) def validate(self, value): if value not in", "def validate(self, value): if value not in self.options: raise ValueError(f'Expected {value!r} to be", "if value not in self.options: raise ValueError(f'Expected {value!r} to be one of {self.options!r}')", "Validator class OneOfValidator(Validator): def __init__(self, *options): self.options = set(options) def validate(self, value): if", "validate(self, value): if value not in self.options: raise ValueError(f'Expected {value!r} to be one", "*options): self.options = set(options) def validate(self, value): if value not in self.options: raise", "class OneOfValidator(Validator): def __init__(self, *options): self.options = set(options) def validate(self, value): if value", "value): if value not in self.options: raise ValueError(f'Expected {value!r} to be one of", "import Validator class OneOfValidator(Validator): def __init__(self, *options): self.options = set(options) def validate(self, value):", "from .Validator import Validator class OneOfValidator(Validator): def __init__(self, *options): 
self.options = set(options) def" ]
[ "# type: ignore[assignment] context_processors = [] from .serialization.context_processors import create_context_processor_type for engine in", "= [] from .serialization.context_processors import create_context_processor_type for engine in settings.TEMPLATES: if engine[\"BACKEND\"] ==", "code\") encoded_schema = schema.encode() import hashlib digest = hashlib.sha1(encoded_schema).hexdigest().encode() if skip_cache is False", "global_types, template_registry, type_registry, value_registry, ) from .serialization import create_schema logger = logging.getLogger(\"django.server\") def", "is unaware of their usage. So we register them in `globals` and force", "export any interface it sees. However, this can bloat our generated files. Instead,", "The package json-schema-to-typescript does expose a way to automatically export any interface it", "\"additionalProperties\": False, \"required\": list(global_types.keys()), \"properties\": global_types, }, }, }, } def get_templates() ->", "updates. # Maybe there's a way to force it to be a single", "and force `json-schema-to-typescript` to expose them. We can't just add these types to", "engine[\"BACKEND\"] == \"reactivated.backend.JSX\": context_processors.extend(engine[\"OPTIONS\"][\"context_processors\"]) # type: ignore[index] type_registry[\"Context\"] = create_context_processor_type(context_processors) ParentTuple = NamedTuple(\"ParentTuple\",", "import AppConfig from django.conf import settings from . import ( definitions_registry, extract_views_from_urlpatterns, global_types,", "exported by `json-schem-to-typescript` because they're referenced using `tsType`, so the libraary is unaware", "start. TODO: handle noreload. \"\"\" schema = get_schema() if ( os.environ.get(\"WERKZEUG_RUN_MAIN\") == \"true\"", "don't pass the file object to stdout, because otherwise # webpack gets confused", "once when generating the parent tuple. We could explore doing two passes in", "the parent tuple. 
We could explore doing two passes in the future. See", "[\"node\", \"./node_modules/reactivated/generator.js\"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, ) out, error = process.communicate(encoded_schema) os.makedirs(\"client/generated\", exist_ok=True) with open(\"client/generated/index.tsx\",", "= logging.getLogger(\"django.server\") def get_urls_schema() -> Dict[str, Any]: urlconf = importlib.import_module(settings.ROOT_URLCONF) urlpatterns = urlconf.urlpatterns", "Dict[str, Any]: urlconf = importlib.import_module(settings.ROOT_URLCONF) urlpatterns = urlconf.urlpatterns # type: ignore[attr-defined] from django.urls", "generating the parent tuple. We could explore doing two passes in the future.", "digest = hashlib.sha1(encoded_schema).hexdigest().encode() if skip_cache is False and os.path.exists(\"client/generated/index.tsx\"): with open(\"client/generated/index.tsx\", \"r+b\") as", "the `type_registry` because that's only parsed once when generating the parent tuple. We", "as nothing has changed\") return #: Note that we don't pass the file", "occasionally run into types that we want available globally but are not directly", "these types to the `type_registry` because that's only parsed once when generating the", "def generate_schema(schema: str, skip_cache: bool = False) -> None: \"\"\" For development usage", "confused with the half-written file when we make updates. # Maybe there's a", "requires Node and Python installed You can use this function for your E2E", "referenced by templates. These aren't exported by `json-schem-to-typescript` because they're referenced using `tsType`,", "\"templates\": get_templates(), \"types\": get_types_schema(), \"values\": get_values(), } return json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name", "json import logging import os import subprocess from typing import Any, Dict, NamedTuple,", "write? I tried # open('w+b', buffering=0) but no luck. 
process = subprocess.Popen( [\"node\",", "actually starts twice. So we prevent generation on the first start. TODO: handle", "a way to automatically export any interface it sees. However, this can bloat", "so the libraary is unaware of their usage. So we register them in", "dev server after restarts or initial start. pass is_server_started = \"DJANGO_SEVER_STARTING\" in os.environ", "side code\") encoded_schema = schema.encode() import hashlib digest = hashlib.sha1(encoded_schema).hexdigest().encode() if skip_cache is", "only parsed once when generating the parent tuple. We could explore doing two", "\"required\": list(global_types.keys()), \"properties\": global_types, }, }, }, } def get_templates() -> Dict[str, Tuple[Any]]:", "= urlconf.urlpatterns # type: ignore[attr-defined] from django.urls import converters from django.urls.resolvers import RoutePattern", "regex] = { \"route\": f\"/{regex}\", \"args\": { arg_name: converter_mapping.get(arg_converter.__class__, \"string\") for arg_name, arg_converter", "not directly referenced by templates. These aren't exported by `json-schem-to-typescript` because they're referenced", "\"types\": get_types_schema(), \"values\": get_values(), } return json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name = \"reactivated\"", "and os.path.exists(\"client/generated/index.tsx\"): with open(\"client/generated/index.tsx\", \"r+b\") as existing: already_generated = existing.read() if digest in", "logger.info(\"Skipping generation as nothing has changed\") return #: Note that we don't pass", "passes in the future. See `unreachableDefinitions` in json-schema-to-typescript \"\"\" type_registry[\"globals\"] = Any #", "= existing.read() if digest in already_generated: logger.info(\"Skipping generation as nothing has changed\") return", "for the subprocess of the dev server after restarts or initial start. 
pass", "create_context_processor_type for engine in settings.TEMPLATES: if engine[\"BACKEND\"] == \"reactivated.backend.JSX\": context_processors.extend(engine[\"OPTIONS\"][\"context_processors\"]) # type: ignore[index]", "for arg_name, arg_converter in pattern.converters.items() }, } return reverse def get_types_schema() -> Any:", "this function for your E2E test prep. \"\"\" logger.info(\"Generating interfaces and client side", ". import ( definitions_registry, extract_views_from_urlpatterns, global_types, template_registry, type_registry, value_registry, ) from .serialization import", "import Any, Dict, NamedTuple, Tuple from django.apps import AppConfig from django.conf import settings", "json-schema-to-typescript \"\"\" type_registry[\"globals\"] = Any # type: ignore[assignment] context_processors = [] from .serialization.context_processors", "webpack gets confused with the half-written file when we make updates. # Maybe", "that's only parsed once when generating the parent tuple. We could explore doing", "\"\"\" Django's dev server actually starts twice. So we prevent generation on the", "first start. TODO: handle noreload. 
\"\"\" schema = get_schema() if ( os.environ.get(\"WERKZEUG_RUN_MAIN\") ==", "{} for _, regex, name, pattern in urls: if not isinstance(pattern, RoutePattern): continue", "\"true\" return generate_schema(schema) def generate_schema(schema: str, skip_cache: bool = False) -> None: \"\"\"", "from django.urls import converters from django.urls.resolvers import RoutePattern converter_mapping = { converters.IntConverter: \"number\",", "Note that we don't pass the file object to stdout, because otherwise #", "process.communicate(encoded_schema) os.makedirs(\"client/generated\", exist_ok=True) with open(\"client/generated/index.tsx\", \"w+b\") as output: output.write(b\"// Digest: %s\\n\" % digest)", "from django.urls.resolvers import RoutePattern converter_mapping = { converters.IntConverter: \"number\", converters.StringConverter: \"string\", converters.UUIDConverter: \"string\",", "{ \"route\": f\"/{regex}\", \"args\": { arg_name: converter_mapping.get(arg_converter.__class__, \"string\") for arg_name, arg_converter in pattern.converters.items()", "stdout, because otherwise # webpack gets confused with the half-written file when we", "definitions_registry) definitions_registry.update(definitions) return { \"definitions\": definitions, **{ **definitions[\"reactivated.apps.ParentTuple\"], \"properties\": { **definitions[\"reactivated.apps.ParentTuple\"][\"properties\"], \"globals\": {", "None: \"\"\" For development usage only, this requires Node and Python installed You", "\"type\": \"object\", \"additionalProperties\": False, \"required\": list(global_types.keys()), \"properties\": global_types, }, }, }, } def", "and client side code\") encoded_schema = schema.encode() import hashlib digest = hashlib.sha1(encoded_schema).hexdigest().encode() if", "using `tsType`, so the libraary is unaware of their usage. 
So we register", "# type: ignore[misc] parent_schema, definitions = create_schema(ParentTuple, definitions_registry) definitions_registry.update(definitions) return { \"definitions\": definitions,", "get_types_schema() -> Any: \"\"\" The package json-schema-to-typescript does expose a way to automatically", "generated files. Instead, while creating the schema, we occasionally run into types that", "we don't pass the file object to stdout, because otherwise # webpack gets", "the libraary is unaware of their usage. So we register them in `globals`", "doing two passes in the future. See `unreachableDefinitions` in json-schema-to-typescript \"\"\" type_registry[\"globals\"] =", "reverse[name or regex] = { \"route\": f\"/{regex}\", \"args\": { arg_name: converter_mapping.get(arg_converter.__class__, \"string\") for", "types to the `type_registry` because that's only parsed once when generating the parent", "\"reactivated\" def ready(self) -> None: \"\"\" Django's dev server actually starts twice. 
So", "{ **definitions[\"reactivated.apps.ParentTuple\"][\"properties\"], \"globals\": { \"type\": \"object\", \"additionalProperties\": False, \"required\": list(global_types.keys()), \"properties\": global_types, },", "global_types, }, }, }, } def get_templates() -> Dict[str, Tuple[Any]]: return template_registry def", "\"true\" or os.environ.get(\"RUN_MAIN\") == \"true\" ): # Triggers for the subprocess of the", "Any: \"\"\" The package json-schema-to-typescript does expose a way to automatically export any", "definitions, **{ **definitions[\"reactivated.apps.ParentTuple\"], \"properties\": { **definitions[\"reactivated.apps.ParentTuple\"][\"properties\"], \"globals\": { \"type\": \"object\", \"additionalProperties\": False, \"required\":", "generate_schema(schema) def generate_schema(schema: str, skip_cache: bool = False) -> None: \"\"\" For development", "str: schema = { \"urls\": get_urls_schema(), \"templates\": get_templates(), \"types\": get_types_schema(), \"values\": get_values(), }", "== \"true\" ): # Triggers for the subprocess of the dev server after", "get_types_schema(), \"values\": get_values(), } return json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name = \"reactivated\" def", "not isinstance(pattern, RoutePattern): continue reverse[name or regex] = { \"route\": f\"/{regex}\", \"args\": {", "urlpatterns = urlconf.urlpatterns # type: ignore[attr-defined] from django.urls import converters from django.urls.resolvers import", "logging import os import subprocess from typing import Any, Dict, NamedTuple, Tuple from", "because they're referenced using `tsType`, so the libraary is unaware of their usage.", "from django.apps import AppConfig from django.conf import settings from . 
import ( definitions_registry,", "hashlib.sha1(encoded_schema).hexdigest().encode() if skip_cache is False and os.path.exists(\"client/generated/index.tsx\"): with open(\"client/generated/index.tsx\", \"r+b\") as existing: already_generated", "extract_views_from_urlpatterns, global_types, template_registry, type_registry, value_registry, ) from .serialization import create_schema logger = logging.getLogger(\"django.server\")", "that we don't pass the file object to stdout, because otherwise # webpack", "converters.UUIDConverter: \"string\", converters.SlugConverter: \"string\", converters.PathConverter: \"string\", } urls = extract_views_from_urlpatterns(urlpatterns) # type: ignore[no-untyped-call]", "However, this can bloat our generated files. Instead, while creating the schema, we", "already_generated = existing.read() if digest in already_generated: logger.info(\"Skipping generation as nothing has changed\")", "can bloat our generated files. Instead, while creating the schema, we occasionally run", "and Python installed You can use this function for your E2E test prep.", "= Any # type: ignore[assignment] context_processors = [] from .serialization.context_processors import create_context_processor_type for", "\"\"\" logger.info(\"Generating interfaces and client side code\") encoded_schema = schema.encode() import hashlib digest", "reverse def get_types_schema() -> Any: \"\"\" The package json-schema-to-typescript does expose a way", "\"\"\" type_registry[\"globals\"] = Any # type: ignore[assignment] context_processors = [] from .serialization.context_processors import", "with the half-written file when we make updates. # Maybe there's a way", "ignore[misc] parent_schema, definitions = create_schema(ParentTuple, definitions_registry) definitions_registry.update(definitions) return { \"definitions\": definitions, **{ **definitions[\"reactivated.apps.ParentTuple\"],", "for your E2E test prep. 
\"\"\" logger.info(\"Generating interfaces and client side code\") encoded_schema", "bloat our generated files. Instead, while creating the schema, we occasionally run into", "django.apps import AppConfig from django.conf import settings from . import ( definitions_registry, extract_views_from_urlpatterns,", "= { \"route\": f\"/{regex}\", \"args\": { arg_name: converter_mapping.get(arg_converter.__class__, \"string\") for arg_name, arg_converter in", "stdout=subprocess.PIPE, stdin=subprocess.PIPE, ) out, error = process.communicate(encoded_schema) os.makedirs(\"client/generated\", exist_ok=True) with open(\"client/generated/index.tsx\", \"w+b\") as", "half-written file when we make updates. # Maybe there's a way to force", "converters.PathConverter: \"string\", } urls = extract_views_from_urlpatterns(urlpatterns) # type: ignore[no-untyped-call] reverse = {} for", "get_values(), } return json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name = \"reactivated\" def ready(self) ->", "Django's dev server actually starts twice. So we prevent generation on the first", "\"true\" ): # Triggers for the subprocess of the dev server after restarts", "return json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name = \"reactivated\" def ready(self) -> None: \"\"\"", "installed You can use this function for your E2E test prep. \"\"\" logger.info(\"Generating", "gets confused with the half-written file when we make updates. # Maybe there's", "if engine[\"BACKEND\"] == \"reactivated.backend.JSX\": context_processors.extend(engine[\"OPTIONS\"][\"context_processors\"]) # type: ignore[index] type_registry[\"Context\"] = create_context_processor_type(context_processors) ParentTuple =", "E2E test prep. \"\"\" logger.info(\"Generating interfaces and client side code\") encoded_schema = schema.encode()", "sees. However, this can bloat our generated files. 
Instead, while creating the schema,", "}, }, } def get_templates() -> Dict[str, Tuple[Any]]: return template_registry def get_values() ->", "schema = get_schema() if ( os.environ.get(\"WERKZEUG_RUN_MAIN\") == \"true\" or os.environ.get(\"RUN_MAIN\") == \"true\" ):", "converters.SlugConverter: \"string\", converters.PathConverter: \"string\", } urls = extract_views_from_urlpatterns(urlpatterns) # type: ignore[no-untyped-call] reverse =", "{ \"definitions\": definitions, **{ **definitions[\"reactivated.apps.ParentTuple\"], \"properties\": { **definitions[\"reactivated.apps.ParentTuple\"][\"properties\"], \"globals\": { \"type\": \"object\", \"additionalProperties\":", "in already_generated: logger.info(\"Skipping generation as nothing has changed\") return #: Note that we", "\"values\": get_values(), } return json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name = \"reactivated\" def ready(self)", "\"\"\" schema = get_schema() if ( os.environ.get(\"WERKZEUG_RUN_MAIN\") == \"true\" or os.environ.get(\"RUN_MAIN\") == \"true\"", "urlconf = importlib.import_module(settings.ROOT_URLCONF) urlpatterns = urlconf.urlpatterns # type: ignore[attr-defined] from django.urls import converters", "open(\"client/generated/index.tsx\", \"r+b\") as existing: already_generated = existing.read() if digest in already_generated: logger.info(\"Skipping generation", "this requires Node and Python installed You can use this function for your", "= create_context_processor_type(context_processors) ParentTuple = NamedTuple(\"ParentTuple\", type_registry.items()) # type: ignore[misc] parent_schema, definitions = create_schema(ParentTuple,", "list(global_types.keys()), \"properties\": global_types, }, }, }, } def get_templates() -> Dict[str, Tuple[Any]]: return", "handle noreload. 
\"\"\" schema = get_schema() if ( os.environ.get(\"WERKZEUG_RUN_MAIN\") == \"true\" or os.environ.get(\"RUN_MAIN\")", "{ \"type\": \"object\", \"additionalProperties\": False, \"required\": list(global_types.keys()), \"properties\": global_types, }, }, }, }", "= importlib.import_module(settings.ROOT_URLCONF) urlpatterns = urlconf.urlpatterns # type: ignore[attr-defined] from django.urls import converters from", "if digest in already_generated: logger.info(\"Skipping generation as nothing has changed\") return #: Note", "arg_name, arg_converter in pattern.converters.items() }, } return reverse def get_types_schema() -> Any: \"\"\"", "can use this function for your E2E test prep. \"\"\" logger.info(\"Generating interfaces and", "two passes in the future. See `unreachableDefinitions` in json-schema-to-typescript \"\"\" type_registry[\"globals\"] = Any", "} def get_templates() -> Dict[str, Tuple[Any]]: return template_registry def get_values() -> Dict[str, Any]:", "import json import logging import os import subprocess from typing import Any, Dict,", "get_urls_schema() -> Dict[str, Any]: urlconf = importlib.import_module(settings.ROOT_URLCONF) urlpatterns = urlconf.urlpatterns # type: ignore[attr-defined]", "= \"DJANGO_SEVER_STARTING\" in os.environ if is_server_started is False: os.environ[\"DJANGO_SEVER_STARTING\"] = \"true\" return generate_schema(schema)", "ignore[attr-defined] from django.urls import converters from django.urls.resolvers import RoutePattern converter_mapping = { converters.IntConverter:", "TODO: handle noreload. \"\"\" schema = get_schema() if ( os.environ.get(\"WERKZEUG_RUN_MAIN\") == \"true\" or", "to automatically export any interface it sees. 
However, this can bloat our generated", "== \"true\" or os.environ.get(\"RUN_MAIN\") == \"true\" ): # Triggers for the subprocess of", "= {} for _, regex, name, pattern in urls: if not isinstance(pattern, RoutePattern):", "os import subprocess from typing import Any, Dict, NamedTuple, Tuple from django.apps import", "json-schema-to-typescript does expose a way to automatically export any interface it sees. However,", "= process.communicate(encoded_schema) os.makedirs(\"client/generated\", exist_ok=True) with open(\"client/generated/index.tsx\", \"w+b\") as output: output.write(b\"// Digest: %s\\n\" %", "`tsType`, so the libraary is unaware of their usage. So we register them", "just add these types to the `type_registry` because that's only parsed once when", "Triggers for the subprocess of the dev server after restarts or initial start.", "} return reverse def get_types_schema() -> Any: \"\"\" The package json-schema-to-typescript does expose", "because otherwise # webpack gets confused with the half-written file when we make", "django.conf import settings from . import ( definitions_registry, extract_views_from_urlpatterns, global_types, template_registry, type_registry, value_registry,", "}, } return reverse def get_types_schema() -> Any: \"\"\" The package json-schema-to-typescript does", "def get_values() -> Dict[str, Any]: return value_registry def get_schema() -> str: schema =", "} return json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name = \"reactivated\" def ready(self) -> None:", "`globals` and force `json-schema-to-typescript` to expose them. We can't just add these types", "they're referenced using `tsType`, so the libraary is unaware of their usage. 
So", "\"\"\" For development usage only, this requires Node and Python installed You can", "subprocess.Popen( [\"node\", \"./node_modules/reactivated/generator.js\"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, ) out, error = process.communicate(encoded_schema) os.makedirs(\"client/generated\", exist_ok=True) with", "files. Instead, while creating the schema, we occasionally run into types that we", "server actually starts twice. So we prevent generation on the first start. TODO:", "existing: already_generated = existing.read() if digest in already_generated: logger.info(\"Skipping generation as nothing has", ") out, error = process.communicate(encoded_schema) os.makedirs(\"client/generated\", exist_ok=True) with open(\"client/generated/index.tsx\", \"w+b\") as output: output.write(b\"//", "in os.environ if is_server_started is False: os.environ[\"DJANGO_SEVER_STARTING\"] = \"true\" return generate_schema(schema) def generate_schema(schema:", "as existing: already_generated = existing.read() if digest in already_generated: logger.info(\"Skipping generation as nothing", "add these types to the `type_registry` because that's only parsed once when generating", "Dict[str, Any]: return value_registry def get_schema() -> str: schema = { \"urls\": get_urls_schema(),", "force it to be a single atomic write? I tried # open('w+b', buffering=0)", "import subprocess from typing import Any, Dict, NamedTuple, Tuple from django.apps import AppConfig", "create_schema logger = logging.getLogger(\"django.server\") def get_urls_schema() -> Dict[str, Any]: urlconf = importlib.import_module(settings.ROOT_URLCONF) urlpatterns", "restarts or initial start. 
pass is_server_started = \"DJANGO_SEVER_STARTING\" in os.environ if is_server_started is", "type_registry[\"Context\"] = create_context_processor_type(context_processors) ParentTuple = NamedTuple(\"ParentTuple\", type_registry.items()) # type: ignore[misc] parent_schema, definitions =", "globally but are not directly referenced by templates. These aren't exported by `json-schem-to-typescript`", "= create_schema(ParentTuple, definitions_registry) definitions_registry.update(definitions) return { \"definitions\": definitions, **{ **definitions[\"reactivated.apps.ParentTuple\"], \"properties\": { **definitions[\"reactivated.apps.ParentTuple\"][\"properties\"],", "\"definitions\": definitions, **{ **definitions[\"reactivated.apps.ParentTuple\"], \"properties\": { **definitions[\"reactivated.apps.ParentTuple\"][\"properties\"], \"globals\": { \"type\": \"object\", \"additionalProperties\": False,", "False) -> None: \"\"\" For development usage only, this requires Node and Python", "name = \"reactivated\" def ready(self) -> None: \"\"\" Django's dev server actually starts", "get_schema() -> str: schema = { \"urls\": get_urls_schema(), \"templates\": get_templates(), \"types\": get_types_schema(), \"values\":", "os.path.exists(\"client/generated/index.tsx\"): with open(\"client/generated/index.tsx\", \"r+b\") as existing: already_generated = existing.read() if digest in already_generated:", "isinstance(pattern, RoutePattern): continue reverse[name or regex] = { \"route\": f\"/{regex}\", \"args\": { arg_name:", "\"globals\": { \"type\": \"object\", \"additionalProperties\": False, \"required\": list(global_types.keys()), \"properties\": global_types, }, }, },", "generation as nothing has changed\") return #: Note that we don't pass the", "converters.IntConverter: \"number\", converters.StringConverter: \"string\", converters.UUIDConverter: \"string\", converters.SlugConverter: \"string\", converters.PathConverter: \"string\", } urls =", "}, } def get_templates() -> 
Dict[str, Tuple[Any]]: return template_registry def get_values() -> Dict[str,", "# webpack gets confused with the half-written file when we make updates. #", ") from .serialization import create_schema logger = logging.getLogger(\"django.server\") def get_urls_schema() -> Dict[str, Any]:", "the schema, we occasionally run into types that we want available globally but", "\"number\", converters.StringConverter: \"string\", converters.UUIDConverter: \"string\", converters.SlugConverter: \"string\", converters.PathConverter: \"string\", } urls = extract_views_from_urlpatterns(urlpatterns)", "def ready(self) -> None: \"\"\" Django's dev server actually starts twice. So we", "return generate_schema(schema) def generate_schema(schema: str, skip_cache: bool = False) -> None: \"\"\" For", "type_registry[\"globals\"] = Any # type: ignore[assignment] context_processors = [] from .serialization.context_processors import create_context_processor_type", "# Maybe there's a way to force it to be a single atomic", "create_schema(ParentTuple, definitions_registry) definitions_registry.update(definitions) return { \"definitions\": definitions, **{ **definitions[\"reactivated.apps.ParentTuple\"], \"properties\": { **definitions[\"reactivated.apps.ParentTuple\"][\"properties\"], \"globals\":", "referenced using `tsType`, so the libraary is unaware of their usage. So we", "definitions_registry.update(definitions) return { \"definitions\": definitions, **{ **definitions[\"reactivated.apps.ParentTuple\"], \"properties\": { **definitions[\"reactivated.apps.ParentTuple\"][\"properties\"], \"globals\": { \"type\":", "has changed\") return #: Note that we don't pass the file object to", "of the dev server after restarts or initial start. 
pass is_server_started = \"DJANGO_SEVER_STARTING\"", "bool = False) -> None: \"\"\" For development usage only, this requires Node", "changed\") return #: Note that we don't pass the file object to stdout,", "type: ignore[assignment] context_processors = [] from .serialization.context_processors import create_context_processor_type for engine in settings.TEMPLATES:", "test prep. \"\"\" logger.info(\"Generating interfaces and client side code\") encoded_schema = schema.encode() import", "ignore[index] type_registry[\"Context\"] = create_context_processor_type(context_processors) ParentTuple = NamedTuple(\"ParentTuple\", type_registry.items()) # type: ignore[misc] parent_schema, definitions", "file object to stdout, because otherwise # webpack gets confused with the half-written", "AppConfig from django.conf import settings from . import ( definitions_registry, extract_views_from_urlpatterns, global_types, template_registry,", "import hashlib digest = hashlib.sha1(encoded_schema).hexdigest().encode() if skip_cache is False and os.path.exists(\"client/generated/index.tsx\"): with open(\"client/generated/index.tsx\",", "import os import subprocess from typing import Any, Dict, NamedTuple, Tuple from django.apps", "# type: ignore[index] type_registry[\"Context\"] = create_context_processor_type(context_processors) ParentTuple = NamedTuple(\"ParentTuple\", type_registry.items()) # type: ignore[misc]", "import logging import os import subprocess from typing import Any, Dict, NamedTuple, Tuple", "automatically export any interface it sees. 
However, this can bloat our generated files.", "json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name = \"reactivated\" def ready(self) -> None: \"\"\" Django's", "`json-schem-to-typescript` because they're referenced using `tsType`, so the libraary is unaware of their", "Any # type: ignore[assignment] context_processors = [] from .serialization.context_processors import create_context_processor_type for engine", "only, this requires Node and Python installed You can use this function for", "regex, name, pattern in urls: if not isinstance(pattern, RoutePattern): continue reverse[name or regex]", "get_templates(), \"types\": get_types_schema(), \"values\": get_values(), } return json.dumps(schema, indent=4) class ReactivatedConfig(AppConfig): name =", "You can use this function for your E2E test prep. \"\"\" logger.info(\"Generating interfaces", "in settings.TEMPLATES: if engine[\"BACKEND\"] == \"reactivated.backend.JSX\": context_processors.extend(engine[\"OPTIONS\"][\"context_processors\"]) # type: ignore[index] type_registry[\"Context\"] = create_context_processor_type(context_processors)", "-> None: \"\"\" For development usage only, this requires Node and Python installed", "import create_schema logger = logging.getLogger(\"django.server\") def get_urls_schema() -> Dict[str, Any]: urlconf = importlib.import_module(settings.ROOT_URLCONF)", "# type: ignore[attr-defined] from django.urls import converters from django.urls.resolvers import RoutePattern converter_mapping =", "\"properties\": global_types, }, }, }, } def get_templates() -> Dict[str, Tuple[Any]]: return template_registry", "django.urls.resolvers import RoutePattern converter_mapping = { converters.IntConverter: \"number\", converters.StringConverter: \"string\", converters.UUIDConverter: \"string\", converters.SlugConverter:", "package json-schema-to-typescript does expose a way to automatically export any interface it sees.", "our generated files. 
Instead, while creating the schema, we occasionally run into types", "to be a single atomic write? I tried # open('w+b', buffering=0) but no", "import settings from . import ( definitions_registry, extract_views_from_urlpatterns, global_types, template_registry, type_registry, value_registry, )", "is False: os.environ[\"DJANGO_SEVER_STARTING\"] = \"true\" return generate_schema(schema) def generate_schema(schema: str, skip_cache: bool =", "return value_registry def get_schema() -> str: schema = { \"urls\": get_urls_schema(), \"templates\": get_templates(),", "in `globals` and force `json-schema-to-typescript` to expose them. We can't just add these", "luck. process = subprocess.Popen( [\"node\", \"./node_modules/reactivated/generator.js\"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, ) out, error = process.communicate(encoded_schema)", "usage only, this requires Node and Python installed You can use this function", "def get_types_schema() -> Any: \"\"\" The package json-schema-to-typescript does expose a way to", "def get_schema() -> str: schema = { \"urls\": get_urls_schema(), \"templates\": get_templates(), \"types\": get_types_schema(),", "Tuple[Any]]: return template_registry def get_values() -> Dict[str, Any]: return value_registry def get_schema() ->", "prep. 
\"\"\" logger.info(\"Generating interfaces and client side code\") encoded_schema = schema.encode() import hashlib", "-> str: schema = { \"urls\": get_urls_schema(), \"templates\": get_templates(), \"types\": get_types_schema(), \"values\": get_values(),", "get_values() -> Dict[str, Any]: return value_registry def get_schema() -> str: schema = {", "import ( definitions_registry, extract_views_from_urlpatterns, global_types, template_registry, type_registry, value_registry, ) from .serialization import create_schema", "ignore[no-untyped-call] reverse = {} for _, regex, name, pattern in urls: if not", "These aren't exported by `json-schem-to-typescript` because they're referenced using `tsType`, so the libraary", "indent=4) class ReactivatedConfig(AppConfig): name = \"reactivated\" def ready(self) -> None: \"\"\" Django's dev", "arg_name: converter_mapping.get(arg_converter.__class__, \"string\") for arg_name, arg_converter in pattern.converters.items() }, } return reverse def", "**definitions[\"reactivated.apps.ParentTuple\"][\"properties\"], \"globals\": { \"type\": \"object\", \"additionalProperties\": False, \"required\": list(global_types.keys()), \"properties\": global_types, }, },", "type_registry.items()) # type: ignore[misc] parent_schema, definitions = create_schema(ParentTuple, definitions_registry) definitions_registry.update(definitions) return { \"definitions\":", "type: ignore[misc] parent_schema, definitions = create_schema(ParentTuple, definitions_registry) definitions_registry.update(definitions) return { \"definitions\": definitions, **{", "the dev server after restarts or initial start. pass is_server_started = \"DJANGO_SEVER_STARTING\" in", "interface it sees. However, this can bloat our generated files. Instead, while creating", "a single atomic write? I tried # open('w+b', buffering=0) but no luck. process", "libraary is unaware of their usage. 
So we register them in `globals` and", "Any]: urlconf = importlib.import_module(settings.ROOT_URLCONF) urlpatterns = urlconf.urlpatterns # type: ignore[attr-defined] from django.urls import", "ready(self) -> None: \"\"\" Django's dev server actually starts twice. So we prevent", "os.environ.get(\"RUN_MAIN\") == \"true\" ): # Triggers for the subprocess of the dev server", "from . import ( definitions_registry, extract_views_from_urlpatterns, global_types, template_registry, type_registry, value_registry, ) from .serialization", "subprocess of the dev server after restarts or initial start. pass is_server_started =", "register them in `globals` and force `json-schema-to-typescript` to expose them. We can't just", "\"route\": f\"/{regex}\", \"args\": { arg_name: converter_mapping.get(arg_converter.__class__, \"string\") for arg_name, arg_converter in pattern.converters.items() },", "from typing import Any, Dict, NamedTuple, Tuple from django.apps import AppConfig from django.conf", "exist_ok=True) with open(\"client/generated/index.tsx\", \"w+b\") as output: output.write(b\"// Digest: %s\\n\" % digest) output.write(out) logger.info(\"Finished", "for _, regex, name, pattern in urls: if not isinstance(pattern, RoutePattern): continue reverse[name", "by templates. These aren't exported by `json-schem-to-typescript` because they're referenced using `tsType`, so", "or initial start. pass is_server_started = \"DJANGO_SEVER_STARTING\" in os.environ if is_server_started is False:", "encoded_schema = schema.encode() import hashlib digest = hashlib.sha1(encoded_schema).hexdigest().encode() if skip_cache is False and", "directly referenced by templates. These aren't exported by `json-schem-to-typescript` because they're referenced using", "parsed once when generating the parent tuple. We could explore doing two passes", "= extract_views_from_urlpatterns(urlpatterns) # type: ignore[no-untyped-call] reverse = {} for _, regex, name, pattern", "start. 
class ReactivatedConfig(AppConfig):
    name = "reactivated"

    def ready(self) -> None:
        """
        Django's dev server actually starts twice. So we prevent generation on
        the first start. TODO: handle noreload.
        """
        schema = get_schema()

        run_main = (
            os.environ.get("WERKZEUG_RUN_MAIN") == "true"
            or os.environ.get("RUN_MAIN") == "true"
        )
        if run_main:
            # Triggers for the subprocess of the dev server after restarts or
            # initial start.
            pass

        if "DJANGO_SEVER_STARTING" not in os.environ:
            # First boot: mark the environment and skip generation entirely.
            os.environ["DJANGO_SEVER_STARTING"] = "true"
            return

        generate_schema(schema)
def get_urls_schema() -> Dict[str, Any]:
    """Map each route-pattern URL to its path template and argument types."""
    from django.urls import converters
    from django.urls.resolvers import RoutePattern

    urlconf = importlib.import_module(settings.ROOT_URLCONF)
    urlpatterns = urlconf.urlpatterns  # type: ignore[attr-defined]

    # Converters we know how to type on the client side; anything unknown
    # degrades to "string".
    converter_mapping = {
        converters.IntConverter: "number",
        converters.StringConverter: "string",
        converters.UUIDConverter: "string",
        converters.SlugConverter: "string",
        converters.PathConverter: "string",
    }

    reverse = {}
    for _, regex, name, pattern in extract_views_from_urlpatterns(urlpatterns):  # type: ignore[no-untyped-call]
        if not isinstance(pattern, RoutePattern):
            continue

        args = {
            arg_name: converter_mapping.get(type(arg_converter), "string")
            for arg_name, arg_converter in pattern.converters.items()
        }
        reverse[name or regex] = {"route": f"/{regex}", "args": args}

    return reverse
def generate_schema(schema: str, skip_cache: bool = False) -> None:
    """
    For development usage only, this requires Node and Python installed.
    You can use this function for your E2E test prep.

    Args:
        schema: the JSON document produced by ``get_schema()``.
        skip_cache: when True, regenerate even if the schema digest already
            matches the previously generated file.
    """
    logger.info("Generating interfaces and client side code")
    encoded_schema = schema.encode()

    import hashlib

    digest = hashlib.sha1(encoded_schema).hexdigest().encode()

    # We only ever read the existing file here, so open it read-only
    # ("rb") rather than read/write ("r+b").
    if skip_cache is False and os.path.exists("client/generated/index.tsx"):
        with open("client/generated/index.tsx", "rb") as existing:
            if digest in existing.read():
                logger.info("Skipping generation as nothing has changed")
                return

    # Note that we don't pass the file object to stdout, because otherwise
    # webpack gets confused with the half-written file when we make updates.
    # Maybe there's a way to force it to be a single atomic write? I tried
    # open('w+b', buffering=0) but no luck.
    process = subprocess.Popen(
        ["node", "./node_modules/reactivated/generator.js"],
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE,
    )
    out, _ = process.communicate(encoded_schema)

    # Don't clobber the previously generated file when the generator crashed:
    # writing empty/partial output would break the client build outright.
    if process.returncode != 0:
        logger.error("Generation failed with exit code %s", process.returncode)
        return

    os.makedirs("client/generated", exist_ok=True)

    with open("client/generated/index.tsx", "w+b") as output:
        output.write(b"// Digest: %s\n" % digest)
        output.write(out)
    logger.info("Finished generating.")
def get_types_schema() -> Any:
    """
    The package json-schema-to-typescript does expose a way to automatically
    export any interface it sees. However, this can bloat our generated files.

    Instead, while creating the schema, we occasionally run into types that we
    want available globally but are not directly referenced by templates.
    These aren't exported by `json-schem-to-typescript` because they're
    referenced using `tsType`, so the libraary is unaware of their usage.
    So we register them in `globals` and force `json-schema-to-typescript` to
    expose them. We can't just add these types to the `type_registry` because
    that's only parsed once when generating the parent tuple. We could explore
    doing two passes in the future.

    See `unreachableDefinitions` in json-schema-to-typescript
    """
    type_registry["globals"] = Any  # type: ignore[assignment]

    from .serialization.context_processors import create_context_processor_type

    # Gather the context processors configured on every JSX template engine.
    context_processors = []
    for engine in settings.TEMPLATES:
        if engine["BACKEND"] == "reactivated.backend.JSX":
            context_processors.extend(engine["OPTIONS"]["context_processors"])  # type: ignore[index]

    type_registry["Context"] = create_context_processor_type(context_processors)

    ParentTuple = NamedTuple("ParentTuple", type_registry.items())  # type: ignore[misc]
    parent_schema, definitions = create_schema(ParentTuple, definitions_registry)
    definitions_registry.update(definitions)

    parent = definitions["reactivated.apps.ParentTuple"]
    return {
        "definitions": definitions,
        **{
            **parent,
            "properties": {
                **parent["properties"],
                "globals": {
                    "type": "object",
                    "additionalProperties": False,
                    "required": list(global_types.keys()),
                    "properties": global_types,
                },
            },
        },
    }
def get_templates() -> Dict[str, Tuple[Any]]:
    """Expose the registry of templates discovered at import time."""
    return template_registry


def get_values() -> Dict[str, Any]:
    """Expose the registry of exported constant values."""
    return value_registry


def get_schema() -> str:
    """Serialize urls, templates, types and values into one JSON document."""
    return json.dumps(
        {
            "urls": get_urls_schema(),
            "templates": get_templates(),
            "types": get_types_schema(),
            "values": get_values(),
        },
        indent=4,
    )
import (", "expose a way to automatically export any interface it sees. However, this can", "the first start. TODO: handle noreload. \"\"\" schema = get_schema() if ( os.environ.get(\"WERKZEUG_RUN_MAIN\")", "\"\"\" The package json-schema-to-typescript does expose a way to automatically export any interface", "type: ignore[no-untyped-call] reverse = {} for _, regex, name, pattern in urls: if", "schema = { \"urls\": get_urls_schema(), \"templates\": get_templates(), \"types\": get_types_schema(), \"values\": get_values(), } return", "by `json-schem-to-typescript` because they're referenced using `tsType`, so the libraary is unaware of", "`type_registry` because that's only parsed once when generating the parent tuple. We could" ]
class KeyPresses():
    """Listen globally for a Shift+B chord until Esc is pressed."""

    def __init__(self):
        # Keep-alive timer; created lazily by keep_from_dying(), so it may
        # still be None when keys arrive.
        self.keep_from_dying_thread = None
        self.holding_shift = False
        self.key_listener = keyboard.Listener(
            on_press=self.on_keydown, on_release=self.on_keyup
        )
        self.key_listener.start()

    def on_keydown(self, key: keyboard.Key):
        char = hasattr(key, 'char')
        if char:
            # Printable key. `key.char` can be None for some keys/layouts,
            # so guard before calling .lower() on it.
            if self.holding_shift and key.char is not None and key.char.lower() == 'b':
                print('Shift B')
        else:
            if key == keyboard.Key.esc:
                self.key_listener.stop()
                # The timer only exists once keep_from_dying() has run;
                # don't crash if Esc arrives before that.
                if self.keep_from_dying_thread is not None:
                    self.keep_from_dying_thread.cancel()
            if key == keyboard.Key.shift:
                self.holding_shift = True

    def on_keyup(self, key: keyboard.Key):
        if key == keyboard.Key.shift:
            self.holding_shift = False

    def keep_from_dying(self):
        # A very long no-op Timer keeps a non-daemon thread alive so the
        # process doesn't exit when the main thread finishes.
        self.keep_from_dying_thread = threading.Timer(1000000, lambda: None)
        self.keep_from_dying_thread.start()


if __name__ == "__main__":
    k = KeyPresses()
    k.keep_from_dying()
from pynput import keyboard class KeyPresses(): def __init__(self): self.keep_from_dying_thread = None self.holding_shift", "key: keyboard.Key): if key == keyboard.Key.shift: self.holding_shift = False def keep_from_dying(self): self.keep_from_dying_thread =", "if char: if self.holding_shift and key.char.lower() == 'b': print('Shift B') else: if key", "self.keep_from_dying_thread.cancel() if key == keyboard.Key.shift: self.holding_shift = True def on_keyup(self, key: keyboard.Key): if", "False def keep_from_dying(self): self.keep_from_dying_thread = threading.Timer(1000000, lambda : None) self.keep_from_dying_thread.start() k = KeyPresses()", "False self.key_listener = keyboard.Listener(on_press=self.on_keydown, on_release=self.on_keyup) self.key_listener.start() def on_keydown(self, key: keyboard.Key): char = hasattr(key,", "self.key_listener = keyboard.Listener(on_press=self.on_keydown, on_release=self.on_keyup) self.key_listener.start() def on_keydown(self, key: keyboard.Key): char = hasattr(key, 'char')", "keyboard.Key.shift: self.holding_shift = False def keep_from_dying(self): self.keep_from_dying_thread = threading.Timer(1000000, lambda : None) self.keep_from_dying_thread.start()", "and key.char.lower() == 'b': print('Shift B') else: if key == keyboard.Key.esc: self.key_listener.stop() self.keep_from_dying_thread.cancel()" ]
[ "= DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth: int = 10, ): \"\"\"Plot", "= 2 # Circle radius. Default to 2 pixels. circle_radius: int = 2", "plotted_landmarks and end_idx in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0],", "landmark_list_all.load_df(df) # Plot every frame index = 0 counter = 0 for i", "= plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 *", "self.landmark_list[i].y = y[i] self.landmark_list[i].z = z[i] if visibility != None: self.landmark_list[i].visibility = visibility[i]", "tuples that specifies how landmarks to be connected. landmark_drawing_spec: A DrawingSpec object that", "both visible. for connection in connections: start_idx = connection[0] end_idx = connection[1] if", "return tuple(v / 255.0 for v in color) def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int,", "in color) def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]] = None, counter=None, IMAGE_PATH=None, Min_Max_axis=None,", "= config self.list_size = self.config.NUM_COORDS self.landmark_list = [] for i in range(self.list_size): obj", "return df def save_image(config, csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df)", "plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks = landmark_list.list_size # Draws the", "for i in range(self.list_size): obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\": None,", "BLACK_COLOR = (0, 0, 0) RED_COLOR = (0, 0, 255) GREEN_COLOR = (0,", "Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every frame index = 0 counter", "import Path import os import cv2 NUM_COORDS = 33 WHITE_COLOR = (224, 224,", 
"df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility) else: self.load_xyz(x, y, z) def load_csv(self, RESULT_CSV): df", "connetions contain invalid landmark index. \"\"\" if not landmark_list: return fig = plt.figure(figsize=(10,", "= 0.5 _RGB_CHANNELS = 3 class Landmark: def __init__(self, x=None, y=None, z=None, visibility=None):", "in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle import optimize gif_path = 'animated_from_video.gif'#", "original one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name = \"video.avi\" images", "load_xyz(self, x, y, z, visibility=None): for i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x = x[i]", "self.landmark_list = [] for i in range(self.list_size): obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis =", "x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = [] for file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list)", "= max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] =", "pathlib import Path import os import cv2 NUM_COORDS = 33 WHITE_COLOR = (224,", "!= False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"]", "self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"]", "= 10, azimuth: int = 10, ): \"\"\"Plot the landmarks and the connections", "connections: start_idx = connection[0] end_idx = connection[1] if not (0 <= start_idx <", "int = 10, azimuth: int = 10, ): \"\"\"Plot the landmarks and the", 
"import matplotlib.pyplot as plt import pandas as pd import imageio from pathlib import", "= y[i] self.landmark_list[i].z = z[i] if visibility != None: self.landmark_list[i].visibility = visibility[i] def", "def __init__(self, x=None, y=None, z=None, visibility=None): self.x = None self.y = None self.z", "raise ValueError( f\"Landmark index is out of range. Invalid connection \" f\"from landmark", "color and line thickness. elevation: The elevation from which to view the plot.", "and landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx]", "the plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: # Color for drawing", "f\"Landmark index is out of range. Invalid connection \" f\"from landmark #{start_idx} to", "List, Mapping, Optional, Tuple, Union from model_setup import Model_Setup import matplotlib.pyplot as plt", "= min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] =", "landmark_list.load_xyz(x, y, z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter +=", "+= 1 # Adopt the plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec:", "1 # Adopt the plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: #", "class Landmark_list: def __init__(self, config): self.config = config self.list_size = self.config.NUM_COORDS self.landmark_list =", 
"message to be plotted. connections: A list of landmark index tuples that specifies", "pygifsicle import optimize gif_path = 'animated_from_video.gif'# create a new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite", "\"y_max\": None, \"z_min\": None, \"z_max\": None, } def load_xyz(self, x, y, z, visibility=None):", "that specifies the landmarks' drawing settings such as color and line thickness. connection_drawing_spec:", "df_temp = df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten()", "255) GREEN_COLOR = (0, 128, 0) BLUE_COLOR = (255, 0, 0) _PRESENCE_THRESHOLD =", "to be connected. landmark_drawing_spec: A DrawingSpec object that specifies the landmarks' drawing settings", "not landmark_list: return fig = plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1", "+= 1 index += 1 # Adopt the plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py", "azimuth: the azimuth angle to rotate the plot. 
Raises: ValueError: If any connetions", "0) BLUE_COLOR = (255, 0, 0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS", "= df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility", ") if start_idx in plotted_landmarks and end_idx in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx],", "images[0])) height, width, layers = frame.shape fps = 24 video = cv2.VideoWriter( video_name,", "self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility) else:", "Optional, Tuple, Union from model_setup import Model_Setup import matplotlib.pyplot as plt import pandas", "self.landmark_list[i].x = x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z = z[i] if visibility != None:", "(-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks = landmark_list.list_size # Draws the connections if", "optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder = IMAGE_PATH", "ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if", "img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0])) height, width, layers =", "landmark_list: A normalized landmark list proto message to be plotted. 
connections: A list", "linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images", "specifies the landmarks' drawing settings such as color and line thickness. connection_drawing_spec: A", "counter = 0 for i in range(480,len(df)): if index % 1 == 0:", "from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: # Color for drawing the annotation.", "connection in connections: start_idx = connection[0] end_idx = connection[1] if not (0 <=", "drawing settings such as color and line thickness. connection_drawing_spec: A DrawingSpec object that", "#{start_idx} to landmark #{end_idx}.\" ) if start_idx in plotted_landmarks and end_idx in plotted_landmarks:", "224) BLACK_COLOR = (0, 0, 0) RED_COLOR = (0, 0, 255) GREEN_COLOR =", "visibility) else: self.load_xyz(x, y, z) def load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df) return", "visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis,", "ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): #", "pixels. thickness: int = 2 # Circle radius. Default to 2 pixels. circle_radius:", "__init__(self, config): self.config = config self.list_size = self.config.NUM_COORDS self.landmark_list = [] for i", "radius. Default to 2 pixels. 
circle_radius: int = 2 def _normalize_color(color): return tuple(v", "annotation. Default to 2 pixels. thickness: int = 2 # Circle radius. Default", "Default to 2 pixels. circle_radius: int = 2 def _normalize_color(color): return tuple(v /", "such as color and line thickness. connection_drawing_spec: A DrawingSpec object that specifies the", "None, \"y_max\": None, \"z_min\": None, \"z_max\": None, } def load_xyz(self, x, y, z,", "which to view the plot. azimuth: the azimuth angle to rotate the plot.", "specifies how landmarks to be connected. landmark_drawing_spec: A DrawingSpec object that specifies the", "= Landmark_list(config) df_temp = df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z", "A DrawingSpec object that specifies the landmarks' drawing settings such as color and", "landmark_list_all = Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every frame index =", "the connections' drawing settings such as color and line thickness. 
elevation: The elevation", "0: landmark_list = Landmark_list(config) df_temp = df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y =", "= max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility) else: self.load_xyz(x, y, z) def", "if not landmark_list: return fig = plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\") if Min_Max_axis:", "num_landmarks = landmark_list.list_size # Draws the connections if the start and end landmarks", "df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z,", "# https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: # Color for drawing the annotation. Default to", "connections in matplotlib 3d. Args: landmark_list: A normalized landmark list proto message to", "index. \"\"\" if not landmark_list: return fig = plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\")", "2 pixels. thickness: int = 2 # Circle radius. Default to 2 pixels.", "and line thickness. elevation: The elevation from which to view the plot. azimuth:", "pixels. 
circle_radius: int = 2 def _normalize_color(color): return tuple(v / 255.0 for v", "frame = cv2.imread(os.path.join(image_folder, images[0])) height, width, layers = frame.shape fps = 24 video", "df def save_image(config, csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) #", "in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0])) height,", "in plotted_landmarks and end_idx in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D(", "= df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False: visibility =", "image_list) \"\"\" from pygifsicle import optimize gif_path = 'animated_from_video.gif'# create a new one", "visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1 index +=", "] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)),", "z, visibility=None): for i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y = y[i]", "if not (0 <= start_idx < num_landmarks and 0 <= end_idx < num_landmarks):", "= {} for idx, landmark in enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD:", "33 WHITE_COLOR = (224, 224, 224) BLACK_COLOR = (0, 0, 0) RED_COLOR =", "IMAGE_PATH=IMAGE_PATH, 
Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1 index += 1 # Adopt the plot_landmarks", "Tuple[int, int, int] = WHITE_COLOR # Thickness for drawing the annotation. Default to", "[img for img in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame =", "Thickness for drawing the annotation. Default to 2 pixels. thickness: int = 2", "df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False:", "landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] =", "the annotation. Default to the white color. color: Tuple[int, int, int] = WHITE_COLOR", "DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth: int", "elevation: The elevation from which to view the plot. 
azimuth: the azimuth angle", "df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter", "self.landmark_list[i].visibility = visibility[i] def load_df(self, df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:,", "\"y_min\": None, \"y_max\": None, \"z_min\": None, \"z_max\": None, } def load_xyz(self, x, y,", "landmark #{end_idx}.\" ) if start_idx in plotted_landmarks and end_idx in plotted_landmarks: landmark_pair =", "__init__(self, x=None, y=None, z=None, visibility=None): self.x = None self.y = None self.z =", "* Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for idx, landmark in enumerate(landmark_list.landmark_list): if", "df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every frame index = 0 counter =", "color) def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]] = None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec:", "and 0 <= end_idx < num_landmarks): raise ValueError( f\"Landmark index is out of", "0) RED_COLOR = (0, 0, 255) GREEN_COLOR = (0, 128, 0) BLUE_COLOR =", "= df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any()", "landmark index. \"\"\" if not landmark_list: return fig = plt.figure(figsize=(10, 10)) ax =", "= 3 class Landmark: def __init__(self, x=None, y=None, z=None, visibility=None): self.x = None", "and the connections in matplotlib 3d. 
Args: landmark_list: A normalized landmark list proto", "i in range(480,len(df)): if index % 1 == 0: landmark_list = Landmark_list(config) df_temp", "False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] =", "plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"])", "(255, 0, 0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS = 3 class", "num_landmarks and 0 <= end_idx < num_landmarks): raise ValueError( f\"Landmark index is out", "index = 0 counter = 0 for i in range(480,len(df)): if index %", "'animated_from_video.gif'# create a new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original one optimize(gif_path) \"\"\"", "load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df) return df def save_image(config, csv_file, IMAGE_PATH): landmark_list_all", "connection[1] if not (0 <= start_idx < num_landmarks and 0 <= end_idx <", "None, \"y_min\": None, \"y_max\": None, \"z_min\": None, \"z_max\": None, } def load_xyz(self, x,", "video_name = \"video.avi\" images = [img for img in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda", "Landmark: def __init__(self, x=None, y=None, z=None, visibility=None): self.x = None self.y = None", "images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = [] for file_name in", "None self.z = None self.visibility = None class Landmark_list: def __init__(self, config): self.config", "\"x_min\": None, \"x_max\": None, \"y_min\": None, \"y_max\": None, \"z_min\": None, \"z_max\": None, }", "Invalid connection \" f\"from landmark #{start_idx} to landmark #{end_idx}.\" ) if start_idx in", "Default to the white color. 
color: Tuple[int, int, int] = WHITE_COLOR # Thickness", "y=None, z=None, visibility=None): self.x = None self.y = None self.z = None self.visibility", "= 'animated_from_video.gif'# create a new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original one optimize(gif_path)", "int]]] = None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec", "# https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list", "landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1 index += 1 #", "= [img for img in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame", "\"x_max\": None, \"y_min\": None, \"y_max\": None, \"z_min\": None, \"z_max\": None, } def load_xyz(self,", "normalized landmark list proto message to be plotted. connections: A list of landmark", "pandas as pd import imageio from pathlib import Path import os import cv2", "in connections: start_idx = connection[0] end_idx = connection[1] if not (0 <= start_idx", "import optimize gif_path = 'animated_from_video.gif'# create a new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the", "= None self.z = None self.visibility = None class Landmark_list: def __init__(self, config):", "class DrawingSpec: # Color for drawing the annotation. Default to the white color.", "\"\"\" from pygifsicle import optimize gif_path = 'animated_from_video.gif'# create a new one optimize(gif_path,", "< num_landmarks): raise ValueError( f\"Landmark index is out of range. 
Invalid connection \"", "range(self.list_size): obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\": None, \"x_max\": None, \"y_min\":", "2 # Circle radius. Default to 2 pixels. circle_radius: int = 2 def", "import List, Mapping, Optional, Tuple, Union from model_setup import Model_Setup import matplotlib.pyplot as", "self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] = max(z) if", "None class Landmark_list: def __init__(self, config): self.config = config self.list_size = self.config.NUM_COORDS self.landmark_list", "self.load_xyz(x, y, z, visibility) else: self.load_xyz(x, y, z) def load_csv(self, RESULT_CSV): df =", "= visibility[i] def load_df(self, df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten()", "= (0, 0, 255) GREEN_COLOR = (0, 128, 0) BLUE_COLOR = (255, 0,", "def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]),", "be plotted. connections: A list of landmark index tuples that specifies how landmarks", "width, layers = frame.shape fps = 24 video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps,", "24 video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) ) for image in", "y, z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1", "file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle import optimize gif_path =", "int, int] = WHITE_COLOR # Thickness for drawing the annotation. 
Default to 2", "y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility)", "counter += 1 index += 1 # Adopt the plot_landmarks from MediaPipe #", "* Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for idx, landmark", "== 0: landmark_list = Landmark_list(config) df_temp = df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y", "DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth: int = 10, ):", "= self.config.NUM_COORDS self.landmark_list = [] for i in range(self.list_size): obj = Landmark() self.landmark_list.append(obj)", "= 33 WHITE_COLOR = (224, 224, 224) BLACK_COLOR = (0, 0, 0) RED_COLOR", "#{end_idx}.\" ) if start_idx in plotted_landmarks and end_idx in plotted_landmarks: landmark_pair = [", "(0 <= start_idx < num_landmarks and 0 <= end_idx < num_landmarks): raise ValueError(", "image_path = Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = []", "df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False: visibility = df.loc[:,", "10)) ax = plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"],", "https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: # Color for drawing the annotation. 
Default to the", "\"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda", "that specifies the connections' drawing settings such as color and line thickness. elevation:", "= 24 video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) ) for image", "ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for idx, landmark in enumerate(landmark_list.landmark_list): if landmark.visibility and", "zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks =", "for idx, landmark in enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D(", "cv2.imread(os.path.join(image_folder, images[0])) height, width, layers = frame.shape fps = 24 video = cv2.VideoWriter(", "start_idx = connection[0] end_idx = connection[1] if not (0 <= start_idx < num_landmarks", "save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name = \"video.avi\" images = [img for img in", "= df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"]", "RED_COLOR = (0, 0, 255) GREEN_COLOR = (0, 128, 0) BLUE_COLOR = (255,", "images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = [] for file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\",", "y[i] self.landmark_list[i].z = z[i] if visibility != None: self.landmark_list[i].visibility = visibility[i] def load_df(self,", "* Min_Max_axis[\"z_min\"]) 
ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks", "ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for idx,", "plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 *", "% 1 == 0: landmark_list = Landmark_list(config) df_temp = df.iloc[i, :] x =", "self.list_size = self.config.NUM_COORDS self.landmark_list = [] for i in range(self.list_size): obj = Landmark()", "= 0 for i in range(480,len(df)): if index % 1 == 0: landmark_list", "Color for drawing the annotation. Default to the white color. color: Tuple[int, int,", "# Thickness for drawing the annotation. Default to 2 pixels. thickness: int =", "landmark_drawing_spec: A DrawingSpec object that specifies the landmarks' drawing settings such as color", "and line thickness. connection_drawing_spec: A DrawingSpec object that specifies the connections' drawing settings", "[] for i in range(self.list_size): obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\":", "): \"\"\"Plot the landmarks and the connections in matplotlib 3d. 
Args: landmark_list: A", "if start_idx in plotted_landmarks and end_idx in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx],", "ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50)", "range(480,len(df)): if index % 1 == 0: landmark_list = Landmark_list(config) df_temp = df.iloc[i,", "drawing the annotation. Default to 2 pixels. thickness: int = 2 # Circle", "if visibility != None: self.landmark_list[i].visibility = visibility[i] def load_df(self, df): x = df.loc[:,", "int] = WHITE_COLOR # Thickness for drawing the annotation. Default to 2 pixels.", "layers = frame.shape fps = 24 video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width,", "= landmark_list.list_size # Draws the connections if the start and end landmarks are", "landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, )", "drawing settings such as color and line thickness. 
elevation: The elevation from which", "None: self.landmark_list[i].visibility = visibility[i] def load_df(self, df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y =", "start_idx < num_landmarks and 0 <= end_idx < num_landmarks): raise ValueError( f\"Landmark index", "(224, 224, 224) BLACK_COLOR = (0, 0, 0) RED_COLOR = (0, 0, 255)", "elevation: int = 10, azimuth: int = 10, ): \"\"\"Plot the landmarks and", "min(z) self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility) else: self.load_xyz(x, y,", "from pathlib import Path import os import cv2 NUM_COORDS = 33 WHITE_COLOR =", "Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int", "contain invalid landmark index. \"\"\" if not landmark_list: return fig = plt.figure(figsize=(10, 10))", "import cv2 NUM_COORDS = 33 WHITE_COLOR = (224, 224, 224) BLACK_COLOR = (0,", "WHITE_COLOR # Thickness for drawing the annotation. Default to 2 pixels. 
thickness: int", "= 2 def _normalize_color(color): return tuple(v / 255.0 for v in color) def", "landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH):", "in enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y],", "< num_landmarks and 0 <= end_idx < num_landmarks): raise ValueError( f\"Landmark index is", "= (0, 0, 0) RED_COLOR = (0, 0, 255) GREEN_COLOR = (0, 128,", "= list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = [] for file_name in images:", "if connections: num_landmarks = landmark_list.list_size # Draws the connections if the start and", "to rotate the plot. Raises: ValueError: If any connetions contain invalid landmark index.", "df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten()", "Plot every frame index = 0 counter = 0 for i in range(480,len(df)):", "DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int = 10,", "white color. color: Tuple[int, int, int] = WHITE_COLOR # Thickness for drawing the", "return fig = plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"],", "model_setup import Model_Setup import matplotlib.pyplot as plt import pandas as pd import imageio", "drawing the annotation. Default to the white color. 
color: Tuple[int, int, int] =", "a new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH):", "<= end_idx < num_landmarks): raise ValueError( f\"Landmark index is out of range. Invalid", "!= None: self.landmark_list[i].visibility = visibility[i] def load_df(self, df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y", "= 10, ): \"\"\"Plot the landmarks and the connections in matplotlib 3d. Args:", "0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS = 3 class Landmark: def", "for drawing the annotation. Default to 2 pixels. thickness: int = 2 #", "None, } def load_xyz(self, x, y, z, visibility=None): for i, landmark in enumerate(self.landmark_list):", "2 pixels. circle_radius: int = 2 def _normalize_color(color): return tuple(v / 255.0 for", "fig = plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1", "import pandas as pd import imageio from pathlib import Path import os import", "landmarks' drawing settings such as color and line thickness. connection_drawing_spec: A DrawingSpec object", "elevation from which to view the plot. azimuth: the azimuth angle to rotate", "of landmark index tuples that specifies how landmarks to be connected. 
landmark_drawing_spec: A", "landmark_list, connections: Optional[List[Tuple[int, int]]] = None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR,", "10, azimuth: int = 10, ): \"\"\"Plot the landmarks and the connections in", "video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) ) for image in images:", "imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle import optimize gif_path = 'animated_from_video.gif'# create a new", "= df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] =", "= (-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks = landmark_list.list_size # Draws the connections", "self.load_df(df) return df def save_image(config, csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config) df = landmark_list_all.load_csv(csv_file)", "matplotlib.pyplot as plt import pandas as pd import imageio from pathlib import Path", "images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle import optimize gif_path = 'animated_from_video.gif'# create", "proto message to be plotted. connections: A list of landmark index tuples that", "/ 255.0 for v in color) def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]] =", "= frame.shape fps = 24 video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height)", "the white color. color: Tuple[int, int, int] = WHITE_COLOR # Thickness for drawing", "settings such as color and line thickness. 
connection_drawing_spec: A DrawingSpec object that specifies", "None, \"z_max\": None, } def load_xyz(self, x, y, z, visibility=None): for i, landmark", "IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation:", "gif_path = 'animated_from_video.gif'# create a new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original one", "for v in color) def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]] = None, counter=None,", "self.y = None self.z = None self.visibility = None class Landmark_list: def __init__(self,", "= connection[1] if not (0 <= start_idx < num_landmarks and 0 <= end_idx", "IMAGE_PATH video_name = \"video.avi\" images = [img for img in os.listdir(image_folder) if img.endswith(\".png\")]", "# Adopt the plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: # Color", "df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility =", "os import cv2 NUM_COORDS = 33 WHITE_COLOR = (224, 224, 224) BLACK_COLOR =", "new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder", "how landmarks to be connected. 
landmark_drawing_spec: A DrawingSpec object that specifies the landmarks'", "landmark_list.list_size # Draws the connections if the start and end landmarks are both", "landmark in enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z = z[i] if", "= DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth:", "df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten()", "connection \" f\"from landmark #{start_idx} to landmark #{end_idx}.\" ) if start_idx in plotted_landmarks", "224, 224) BLACK_COLOR = (0, 0, 0) RED_COLOR = (0, 0, 255) GREEN_COLOR", "the plot. azimuth: the azimuth angle to rotate the plot. Raises: ValueError: If", "0 for i in range(480,len(df)): if index % 1 == 0: landmark_list =", ") counter += 1 index += 1 # Adopt the plot_landmarks from MediaPipe", "if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0])) height, width, layers", "= 0 counter = 0 for i in range(480,len(df)): if index % 1", "the azimuth angle to rotate the plot. Raises: ValueError: If any connetions contain", "to 2 pixels. 
circle_radius: int = 2 def _normalize_color(color): return tuple(v / 255.0", "not (0 <= start_idx < num_landmarks and 0 <= end_idx < num_landmarks): raise", "= [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]),", "-1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for idx, landmark in enumerate(landmark_list.landmark_list):", "= df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, )", "thickness. connection_drawing_spec: A DrawingSpec object that specifies the connections' drawing settings such as", "Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1", "_PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS = 3 class Landmark: def __init__(self,", "max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] = max(z)", "invalid landmark index. \"\"\" if not landmark_list: return fig = plt.figure(figsize=(10, 10)) ax", "{ \"x_min\": None, \"x_max\": None, \"y_min\": None, \"y_max\": None, \"z_min\": None, \"z_max\": None,", "df.columns.str.startswith(\"v\").any() != False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] = max(x)", "the landmarks' drawing settings such as color and line thickness. 
connection_drawing_spec: A DrawingSpec", "Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation,", "the start and end landmarks are both visible. for connection in connections: start_idx", "img in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0]))", "def save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name = \"video.avi\" images = [img for img", "the connections in matplotlib 3d. Args: landmark_list: A normalized landmark list proto message", "def save_image(config, csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot", "if the start and end landmarks are both visible. for connection in connections:", "import imageio from pathlib import Path import os import cv2 NUM_COORDS = 33", "DrawingSpec: # Color for drawing the annotation. Default to the white color. 
color:", "config): self.config = config self.list_size = self.config.NUM_COORDS self.landmark_list = [] for i in", "= landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every frame index = 0 counter = 0", "Optional[List[Tuple[int, int]]] = None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec:", "counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5),", "connections: Optional[List[Tuple[int, int]]] = None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5),", "if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility) else: self.load_xyz(x, y, z) def load_csv(self, RESULT_CSV):", "= WHITE_COLOR # Thickness for drawing the annotation. Default to 2 pixels. thickness:", "= [] for file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle import", "@dataclasses.dataclass class DrawingSpec: # Color for drawing the annotation. Default to the white", "end landmarks are both visible. for connection in connections: start_idx = connection[0] end_idx", "plot. azimuth: the azimuth angle to rotate the plot. Raises: ValueError: If any", "visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] = min(y)", "thickness=5), elevation: int = 10, azimuth: int = 10, ): \"\"\"Plot the landmarks", "A DrawingSpec object that specifies the connections' drawing settings such as color and", "end_idx < num_landmarks): raise ValueError( f\"Landmark index is out of range. 
Invalid connection", "None self.y = None self.z = None self.visibility = None class Landmark_list: def", "MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: # Color for drawing the annotation. Default", "dataclasses from typing import List, Mapping, Optional, Tuple, Union from model_setup import Model_Setup", "xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def", "specifies the connections' drawing settings such as color and line thickness. elevation: The", "= (224, 224, 224) BLACK_COLOR = (0, 0, 0) RED_COLOR = (0, 0,", "x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if", "y, z, visibility) else: self.load_xyz(x, y, z) def load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV)", "landmark_list: return fig = plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 *", "Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for idx, landmark in", "[] for file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle import optimize", "= None class Landmark_list: def __init__(self, config): self.config = config self.list_size = self.config.NUM_COORDS", "0.5 _RGB_CHANNELS = 3 class Landmark: def __init__(self, x=None, y=None, z=None, visibility=None): self.x", "index tuples that specifies how landmarks to be connected. 
landmark_drawing_spec: A DrawingSpec object", "imageio from pathlib import Path import os import cv2 NUM_COORDS = 33 WHITE_COLOR", "plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1 index += 1", "color and line thickness. connection_drawing_spec: A DrawingSpec object that specifies the connections' drawing", "pd.read_csv(RESULT_CSV) self.load_df(df) return df def save_image(config, csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config) df =", "self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\": None, \"x_max\": None, \"y_min\": None, \"y_max\": None, \"z_min\":", "index += 1 # Adopt the plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class", "Landmark_list: def __init__(self, config): self.config = config self.list_size = self.config.NUM_COORDS self.landmark_list = []", "Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1 index += 1 # Adopt the plot_landmarks from", "plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\"))", "azimuth angle to rotate the plot. 
Raises: ValueError: If any connetions contain invalid", "plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, )", "save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False)", "plotted. connections: A list of landmark index tuples that specifies how landmarks to", "plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2],", "import Model_Setup import matplotlib.pyplot as plt import pandas as pd import imageio from", "height, width, layers = frame.shape fps = 24 video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"),", "Landmark_list(config) df_temp = df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z =", "Raises: ValueError: If any connetions contain invalid landmark index. 
\"\"\" if not landmark_list:", "csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every frame", "cv2 NUM_COORDS = 33 WHITE_COLOR = (224, 224, 224) BLACK_COLOR = (0, 0,", "visibility=None): for i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z", "= Landmark() self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\": None, \"x_max\": None, \"y_min\": None, \"y_max\":", "in range(self.list_size): obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\": None, \"x_max\": None,", "Circle radius. Default to 2 pixels. circle_radius: int = 2 def _normalize_color(color): return", "= None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec =", "connections: num_landmarks = landmark_list.list_size # Draws the connections if the start and end", "landmark #{start_idx} to landmark #{end_idx}.\" ) if start_idx in plotted_landmarks and end_idx in", "dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x:", "landmarks to be connected. landmark_drawing_spec: A DrawingSpec object that specifies the landmarks' drawing", "import os import cv2 NUM_COORDS = 33 WHITE_COLOR = (224, 224, 224) BLACK_COLOR", "# Circle radius. Default to 2 pixels. circle_radius: int = 2 def _normalize_color(color):", "1 == 0: landmark_list = Landmark_list(config) df_temp = df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten()", "connections if the start and end landmarks are both visible. 
for connection in", "plt import pandas as pd import imageio from pathlib import Path import os", "GREEN_COLOR = (0, 128, 0) BLUE_COLOR = (255, 0, 0) _PRESENCE_THRESHOLD = 0.5", "index % 1 == 0: landmark_list = Landmark_list(config) df_temp = df.iloc[i, :] x", "<= start_idx < num_landmarks and 0 <= end_idx < num_landmarks): raise ValueError( f\"Landmark", "None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR,", "\"\"\"Plot the landmarks and the connections in matplotlib 3d. Args: landmark_list: A normalized", "self.config = config self.list_size = self.config.NUM_COORDS self.landmark_list = [] for i in range(self.list_size):", "= x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z = z[i] if visibility != None: self.landmark_list[i].visibility", "visibility=None): self.x = None self.y = None self.z = None self.visibility = None", "= min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any():", "circle_radius: int = 2 def _normalize_color(color): return tuple(v / 255.0 for v in", "visibility != None: self.landmark_list[i].visibility = visibility[i] def load_df(self, df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten()", "0 <= end_idx < num_landmarks): raise ValueError( f\"Landmark index is out of range.", "end_idx = connection[1] if not (0 <= start_idx < num_landmarks and 0 <=", "enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]),", "landmark_list = Landmark_list(config) df_temp = df.iloc[i, :] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = 
df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten()", "else: self.load_xyz(x, y, z) def load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df) return df", "cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) ) for image in images: video.write(cv2.imread(os.path.join(image_folder, image)))", "landmarks are both visible. for connection in connections: start_idx = connection[0] end_idx =", "frame.shape fps = 24 video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) )", "fps = 24 video = cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) ) for", "i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z = z[i]", "None self.visibility = None class Landmark_list: def __init__(self, config): self.config = config self.list_size", "BLUE_COLOR = (255, 0, 0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS =", "The elevation from which to view the plot. azimuth: the azimuth angle to", "plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: # Color for drawing the", "255.0 for v in color) def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]] = None,", "= pd.read_csv(RESULT_CSV) self.load_df(df) return df def save_image(config, csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config) df", "= \"video.avi\" images = [img for img in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x:", "# Color for drawing the annotation. Default to the white color. 
color: Tuple[int,", "z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1 index", "0, 0) RED_COLOR = (0, 0, 255) GREEN_COLOR = (0, 128, 0) BLUE_COLOR", "reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0])) height, width, layers = frame.shape fps = 24", "as color and line thickness. connection_drawing_spec: A DrawingSpec object that specifies the connections'", "line thickness. connection_drawing_spec: A DrawingSpec object that specifies the connections' drawing settings such", "int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0])) height, width, layers = frame.shape fps =", "i in range(self.list_size): obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\": None, \"x_max\":", "plotted_landmarks = {} for idx, landmark in enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility <", "in matplotlib 3d. 
Args: landmark_list: A normalized landmark list proto message to be", "Tuple, Union from model_setup import Model_Setup import matplotlib.pyplot as plt import pandas as", "df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"]", "= df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility) plot_landmarks(", "* Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"])", "frame index = 0 counter = 0 for i in range(480,len(df)): if index", "typing import List, Mapping, Optional, Tuple, Union from model_setup import Model_Setup import matplotlib.pyplot", "optimize gif_path = 'animated_from_video.gif'# create a new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original", "image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle import optimize gif_path = 'animated_from_video.gif'# create a", "in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]],", "\"\"\" def save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name = \"video.avi\" images = [img for", "to be plotted. 
connections: A list of landmark index tuples that specifies how", "0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS = 3 class Landmark: def __init__(self, x=None, y=None,", "int = 2 def _normalize_color(color): return tuple(v / 255.0 for v in color)", "df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x)", "-landmark.y) if connections: num_landmarks = landmark_list.list_size # Draws the connections if the start", "max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility)", "to view the plot. azimuth: the azimuth angle to rotate the plot. Raises:", "of range. Invalid connection \" f\"from landmark #{start_idx} to landmark #{end_idx}.\" ) if", "landmark index tuples that specifies how landmarks to be connected. landmark_drawing_spec: A DrawingSpec", "Landmark() self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\": None, \"x_max\": None, \"y_min\": None, \"y_max\": None,", "linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks = landmark_list.list_size #", "\"z_min\": None, \"z_max\": None, } def load_xyz(self, x, y, z, visibility=None): for i,", "Model_Setup import matplotlib.pyplot as plt import pandas as pd import imageio from pathlib", "connected. 
landmark_drawing_spec: A DrawingSpec object that specifies the landmarks' drawing settings such as", "zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path", "config self.list_size = self.config.NUM_COORDS self.landmark_list = [] for i in range(self.list_size): obj =", ":] x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten()", "ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 *", "visible. for connection in connections: start_idx = connection[0] end_idx = connection[1] if not", "as color and line thickness. elevation: The elevation from which to view the", "int = 10, ): \"\"\"Plot the landmarks and the connections in matplotlib 3d.", "list of landmark index tuples that specifies how landmarks to be connected. landmark_drawing_spec:", "# Plot every frame index = 0 counter = 0 for i in", "counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1 index += 1 # Adopt the", "A list of landmark index tuples that specifies how landmarks to be connected.", "the connections if the start and end landmarks are both visible. 
for connection", "= (255, 0, 0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS = 3", "= { \"x_min\": None, \"x_max\": None, \"y_min\": None, \"y_max\": None, \"z_min\": None, \"z_max\":", "landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]],", "create a new one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original one optimize(gif_path) \"\"\" def", "reverse=False) image_list = [] for file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from", "landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every frame index = 0 counter = 0 for", "< _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z,", "= z[i] if visibility != None: self.landmark_list[i].visibility = visibility[i] def load_df(self, df): x", "self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y,", "= None self.y = None self.z = None self.visibility = None class Landmark_list:", "thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth: int =", "optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name = \"video.avi\" images = [img", "z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS,", "start and 
end landmarks are both visible. for connection in connections: start_idx =", "Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = [] for file_name", "from model_setup import Model_Setup import matplotlib.pyplot as plt import pandas as pd import", "ValueError: If any connetions contain invalid landmark index. \"\"\" if not landmark_list: return", "df = pd.read_csv(RESULT_CSV) self.load_df(df) return df def save_image(config, csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config)", "class Landmark: def __init__(self, x=None, y=None, z=None, visibility=None): self.x = None self.y =", "landmark_drawing_spec: DrawingSpec = DrawingSpec(color=RED_COLOR, thickness=5), connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int =", "y, z, visibility=None): for i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y =", "= Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = [] for", "Union from model_setup import Model_Setup import matplotlib.pyplot as plt import pandas as pd", "as plt import pandas as pd import imageio from pathlib import Path import", "(0, 0, 255) GREEN_COLOR = (0, 128, 0) BLUE_COLOR = (255, 0, 0)", "overwrite the original one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name =", "from typing import List, Mapping, Optional, Tuple, Union from model_setup import Model_Setup import", "landmark list proto message to be plotted. connections: A list of landmark index", "connections' drawing settings such as color and line thickness. 
elevation: The elevation from", "= df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y,", "enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z = z[i] if visibility !=", "pd import imageio from pathlib import Path import os import cv2 NUM_COORDS =", "if df.columns.str.startswith(\"v\").any() != False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] =", "x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0])) height, width, layers = frame.shape fps", "self.x = None self.y = None self.z = None self.visibility = None class", "NUM_COORDS = 33 WHITE_COLOR = (224, 224, 224) BLACK_COLOR = (0, 0, 0)", "= cv2.imread(os.path.join(image_folder, images[0])) height, width, layers = frame.shape fps = 24 video =", "in range(480,len(df)): if index % 1 == 0: landmark_list = Landmark_list(config) df_temp =", "for file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle import optimize gif_path", "= max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z,", "annotation. Default to the white color. 
color: Tuple[int, int, int] = WHITE_COLOR #", "self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility) else: self.load_xyz(x, y, z)", "color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH)", "def load_df(self, df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z =", "connection[0] end_idx = connection[1] if not (0 <= start_idx < num_landmarks and 0", "# Draws the connections if the start and end landmarks are both visible.", "settings such as color and line thickness. elevation: The elevation from which to", "x = df_temp[df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x,", "\"video.avi\" images = [img for img in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]),", "WHITE_COLOR = (224, 224, 224) BLACK_COLOR = (0, 0, 0) RED_COLOR = (0,", "128, 0) BLUE_COLOR = (255, 0, 0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5", "object that specifies the landmarks' drawing settings such as color and line thickness.", "= plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1", "that specifies how landmarks to be connected. 
landmark_drawing_spec: A DrawingSpec object that specifies", "0 counter = 0 for i in range(480,len(df)): if index % 1 ==", "for i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z =", "= (0, 128, 0) BLUE_COLOR = (255, 0, 0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD", "xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if connections:", "A normalized landmark list proto message to be plotted. connections: A list of", "num_landmarks): raise ValueError( f\"Landmark index is out of range. Invalid connection \" f\"from", "for img in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder,", "Adopt the plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass class DrawingSpec: # Color for", "Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for", "v in color) def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]] = None, counter=None, IMAGE_PATH=None,", "line thickness. elevation: The elevation from which to view the plot. azimuth: the", "for connection in connections: start_idx = connection[0] end_idx = connection[1] if not (0", "image_list = [] for file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\" from pygifsicle", "are both visible. 
for connection in connections: start_idx = connection[0] end_idx = connection[1]", "def load_xyz(self, x, y, z, visibility=None): for i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x =", "for i in range(480,len(df)): if index % 1 == 0: landmark_list = Landmark_list(config)", "cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) ) for image in images: video.write(cv2.imread(os.path.join(image_folder, image))) cv2.destroyAllWindows() video.release()", "https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images = list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list =", "self.z = None self.visibility = None class Landmark_list: def __init__(self, config): self.config =", "= [] for i in range(self.list_size): obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis = {", "os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0])) height, width,", "continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y)", "self.Min_Max_axis = { \"x_min\": None, \"x_max\": None, \"y_min\": None, \"y_max\": None, \"z_min\": None,", "10, ): \"\"\"Plot the landmarks and the connections in matplotlib 3d. Args: landmark_list:", "range. 
Invalid connection \" f\"from landmark #{start_idx} to landmark #{end_idx}.\" ) if start_idx", "from pygifsicle import optimize gif_path = 'animated_from_video.gif'# create a new one optimize(gif_path, 'animated_from_video_optimized.gif')#", "visibility[i] def load_df(self, df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z", "video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) ) for image in images: video.write(cv2.imread(os.path.join(image_folder, image))) cv2.destroyAllWindows()", "Path import os import cv2 NUM_COORDS = 33 WHITE_COLOR = (224, 224, 224)", "plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]] = None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec =", "z[i] if visibility != None: self.landmark_list[i].visibility = visibility[i] def load_df(self, df): x =", "Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {} for idx, landmark in enumerate(landmark_list.landmark_list): if landmark.visibility", "landmark in enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x],", "and end_idx in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]],", "matplotlib 3d. 
Args: landmark_list: A normalized landmark list proto message to be plotted.", "DrawingSpec object that specifies the landmarks' drawing settings such as color and line", "the original one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name = \"video.avi\"", "y, z) def load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df) return df def save_image(config,", "f\"from landmark #{start_idx} to landmark #{end_idx}.\" ) if start_idx in plotted_landmarks and end_idx", "plot. Raises: ValueError: If any connetions contain invalid landmark index. \"\"\" if not", "landmark.x, -landmark.y) if connections: num_landmarks = landmark_list.list_size # Draws the connections if the", "thickness. elevation: The elevation from which to view the plot. azimuth: the azimuth", "ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks = {}", "thickness: int = 2 # Circle radius. Default to 2 pixels. circle_radius: int", "self.load_xyz(x, y, z) def load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df) return df def", "def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]] = None, counter=None, IMAGE_PATH=None, Min_Max_axis=None, landmark_drawing_spec: DrawingSpec", "out of range. Invalid connection \" f\"from landmark #{start_idx} to landmark #{end_idx}.\" )", "one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name = \"video.avi\" images =", "to 2 pixels. thickness: int = 2 # Circle radius. 
Default to 2", "to landmark #{end_idx}.\" ) if start_idx in plotted_landmarks and end_idx in plotted_landmarks: landmark_pair", "y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False: visibility", "z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() != False: visibility = df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"]", "connections: A list of landmark index tuples that specifies how landmarks to be", "list proto message to be plotted. connections: A list of landmark index tuples", "= IMAGE_PATH video_name = \"video.avi\" images = [img for img in os.listdir(image_folder) if", "the plot. Raises: ValueError: If any connetions contain invalid landmark index. \"\"\" if", "If any connetions contain invalid landmark index. \"\"\" if not landmark_list: return fig", "view the plot. azimuth: the azimuth angle to rotate the plot. Raises: ValueError:", "3 class Landmark: def __init__(self, x=None, y=None, z=None, visibility=None): self.x = None self.y", "color: Tuple[int, int, int] = WHITE_COLOR # Thickness for drawing the annotation. Default", "2 def _normalize_color(color): return tuple(v / 255.0 for v in color) def plot_landmarks(", "to the white color. 
color: Tuple[int, int, int] = WHITE_COLOR # Thickness for", "min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] = min(z) self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x,", "self.config.NUM_COORDS self.landmark_list = [] for i in range(self.list_size): obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis", "= Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every frame index = 0", "image_folder = IMAGE_PATH video_name = \"video.avi\" images = [img for img in os.listdir(image_folder)", "= min(z) self.Min_Max_axis[\"z_max\"] = max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility) else: self.load_xyz(x,", "images = [img for img in os.listdir(image_folder) if img.endswith(\".png\")] images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False)", "plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH,", "None, \"x_max\": None, \"y_min\": None, \"y_max\": None, \"z_min\": None, \"z_max\": None, } def", "color. color: Tuple[int, int, int] = WHITE_COLOR # Thickness for drawing the annotation.", "images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) frame = cv2.imread(os.path.join(image_folder, images[0])) height, width, layers = frame.shape", "be connected. 
landmark_drawing_spec: A DrawingSpec object that specifies the landmarks' drawing settings such", "self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"]", "= connection[0] end_idx = connection[1] if not (0 <= start_idx < num_landmarks and", "min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] = max(y) self.Min_Max_axis[\"z_min\"] = min(z)", "def __init__(self, config): self.config = config self.list_size = self.config.NUM_COORDS self.landmark_list = [] for", "'animated_from_video_optimized.gif')# overwrite the original one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder = IMAGE_PATH video_name", "every frame index = 0 counter = 0 for i in range(480,len(df)): if", "color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks = landmark_list.list_size", "landmarks and the connections in matplotlib 3d. Args: landmark_list: A normalized landmark list", "such as color and line thickness. elevation: The elevation from which to view", "landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86", "3d. Args: landmark_list: A normalized landmark list proto message to be plotted. 
connections:", "max(z) if df.columns.str.startswith(\"v\").any(): self.load_xyz(x, y, z, visibility) else: self.load_xyz(x, y, z) def load_csv(self,", "<filename>Plot3D.py import dataclasses from typing import List, Mapping, Optional, Tuple, Union from model_setup", "rotate the plot. Raises: ValueError: If any connetions contain invalid landmark index. \"\"\"", "and end landmarks are both visible. for connection in connections: start_idx = connection[0]", "landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness, ) plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path =", "for drawing the annotation. Default to the white color. color: Tuple[int, int, int]", ") plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks = landmark_list.list_size # Draws", "Default to 2 pixels. thickness: int = 2 # Circle radius. 
Default to", "_VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z, landmark.x,", "int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = [] for file_name in images: image_list.append(imageio.imread(file_name)) imageio.mimwrite(\"animated_from_images.gif\", image_list) \"\"\"", "df.loc[:, df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] =", "list(image_path.glob(\"*.png\")) images.sort(key=lambda x: int(x.split(\"_\")[2].split(\".\")[0]), reverse=False) image_list = [] for file_name in images: image_list.append(imageio.imread(file_name))", "tuple(v / 255.0 for v in color) def plot_landmarks( landmark_list, connections: Optional[List[Tuple[int, int]]]", "def _normalize_color(color): return tuple(v / 255.0 for v in color) def plot_landmarks( landmark_list,", "df_temp[df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility) plot_landmarks( landmark_list,", "(0, 0, 0) RED_COLOR = (0, 0, 255) GREEN_COLOR = (0, 128, 0)", "x, y, z, visibility=None): for i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y", "RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df) return df def save_image(config, csv_file, IMAGE_PATH): landmark_list_all =", "None, \"z_min\": None, \"z_max\": None, } def load_xyz(self, x, y, z, visibility=None): for", "if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) 
ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"],", "} def load_xyz(self, x, y, z, visibility=None): for i, landmark in enumerate(self.landmark_list): self.landmark_list[i].x", "if index % 1 == 0: landmark_list = Landmark_list(config) df_temp = df.iloc[i, :]", "connection_drawing_spec: DrawingSpec = DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth: int = 10,", "(0, 128, 0) BLUE_COLOR = (255, 0, 0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD =", "= None self.visibility = None class Landmark_list: def __init__(self, config): self.config = config", "object that specifies the connections' drawing settings such as color and line thickness.", "= cv2.VideoWriter( video_name, cv2.VideoWriter_fourcc(*\"DIVX\"), fps=fps, frameSize=(width, height) ) for image in images: video.write(cv2.imread(os.path.join(image_folder,", "as pd import imageio from pathlib import Path import os import cv2 NUM_COORDS", "df.columns.str.startswith(\"v\")].to_numpy().flatten() self.Min_Max_axis[\"x_min\"] = min(x) self.Min_Max_axis[\"x_max\"] = max(x) self.Min_Max_axis[\"y_min\"] = min(y) self.Min_Max_axis[\"y_max\"] = max(y)", "df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH,", "the landmarks and the connections in matplotlib 3d. 
Args: landmark_list: A normalized landmark", "z) def load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df) return df def save_image(config, csv_file,", "0, 255) GREEN_COLOR = (0, 128, 0) BLUE_COLOR = (255, 0, 0) _PRESENCE_THRESHOLD", ") plt.savefig(os.path.join(IMAGE_PATH, \"fram_sec_{}.png\".format(counter)), dpi=50) def save_gif(IMAGE_PATH): # https://medium.com/swlh/python-animated-images-6a85b9b68f86 image_path = Path(IMAGE_PATH) images =", "config.POSE_CONNECTIONS, counter=counter, IMAGE_PATH=IMAGE_PATH, Min_Max_axis=landmark_list_all.Min_Max_axis, ) counter += 1 index += 1 # Adopt", "self.visibility = None class Landmark_list: def __init__(self, config): self.config = config self.list_size =", "\" f\"from landmark #{start_idx} to landmark #{end_idx}.\" ) if start_idx in plotted_landmarks and", "Args: landmark_list: A normalized landmark list proto message to be plotted. connections: A", "_RGB_CHANNELS = 3 class Landmark: def __init__(self, x=None, y=None, z=None, visibility=None): self.x =", "if landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z], ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness,", "obj = Landmark() self.landmark_list.append(obj) self.Min_Max_axis = { \"x_min\": None, \"x_max\": None, \"y_min\": None,", "z=None, visibility=None): self.x = None self.y = None self.z = None self.visibility =", "\"\"\" if not landmark_list: return fig = plt.figure(figsize=(10, 10)) ax = plt.axes(projection=\"3d\") if", "in enumerate(self.landmark_list): self.landmark_list[i].x = x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z = z[i] if visibility", "= 0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS = 3 class Landmark: def __init__(self, x=None,", "_VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS = 3 class Landmark: def __init__(self, x=None, y=None, z=None,", "Mapping, Optional, Tuple, Union from 
model_setup import Model_Setup import matplotlib.pyplot as plt import", "Draws the connections if the start and end landmarks are both visible. for", "def load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df) return df def save_image(config, csv_file, IMAGE_PATH):", "azim=azimuth) plotted_landmarks = {} for idx, landmark in enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility", "z, visibility) else: self.load_xyz(x, y, z) def load_csv(self, RESULT_CSV): df = pd.read_csv(RESULT_CSV) self.load_df(df)", "ValueError( f\"Landmark index is out of range. Invalid connection \" f\"from landmark #{start_idx}", "save_image(config, csv_file, IMAGE_PATH): landmark_list_all = Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every", "end_idx in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1],", "[ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ] ax.plot3D( xs=[landmark_pair[0][0], landmark_pair[1][0]], ys=[landmark_pair[0][1], landmark_pair[1][1]], zs=[landmark_pair[0][2], landmark_pair[1][2]], color=_normalize_color(connection_drawing_spec.color[::-1]), linewidth=connection_drawing_spec.thickness,", "df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:, df.columns.str.startswith(\"z\")].to_numpy().flatten() if df.columns.str.startswith(\"v\").any() !=", "one optimize(gif_path, 'animated_from_video_optimized.gif')# overwrite the original one optimize(gif_path) \"\"\" def save_video(IMAGE_PATH): image_folder =", "x=None, y=None, z=None, visibility=None): self.x = None self.y = None self.z = None", "DrawingSpec(color=BLACK_COLOR, thickness=5), elevation: int = 10, azimuth: int = 10, ): \"\"\"Plot the", "DrawingSpec object that specifies the 
connections' drawing settings such as color and line", "ys=[landmark.x], zs=[-landmark.y], color=_normalize_color(landmark_drawing_spec.color[::-1]), linewidth=landmark_drawing_spec.thickness, ) plotted_landmarks[idx] = (-landmark.z, landmark.x, -landmark.y) if connections: num_landmarks", "1 index += 1 # Adopt the plot_landmarks from MediaPipe # https://github.com/google/mediapipe/blob/master/mediapipe/python/solutions/drawing_utils.py @dataclasses.dataclass", "from which to view the plot. azimuth: the azimuth angle to rotate the", "start_idx in plotted_landmarks and end_idx in plotted_landmarks: landmark_pair = [ plotted_landmarks[start_idx], plotted_landmarks[end_idx], ]", "x[i] self.landmark_list[i].y = y[i] self.landmark_list[i].z = z[i] if visibility != None: self.landmark_list[i].visibility =", "{} for idx, landmark in enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD: continue", "-1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth)", "0, 0) _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 _RGB_CHANNELS = 3 class Landmark:", "any connetions contain invalid landmark index. \"\"\" if not landmark_list: return fig =", "is out of range. Invalid connection \" f\"from landmark #{start_idx} to landmark #{end_idx}.\"", "IMAGE_PATH): landmark_list_all = Landmark_list(config) df = landmark_list_all.load_csv(csv_file) landmark_list_all.load_df(df) # Plot every frame index", "the annotation. Default to 2 pixels. thickness: int = 2 # Circle radius.", "\"z_max\": None, } def load_xyz(self, x, y, z, visibility=None): for i, landmark in", "angle to rotate the plot. 
Raises: ValueError: If any connetions contain invalid landmark", "import dataclasses from typing import List, Mapping, Optional, Tuple, Union from model_setup import", "self.landmark_list[i].z = z[i] if visibility != None: self.landmark_list[i].visibility = visibility[i] def load_df(self, df):", "ax = plt.axes(projection=\"3d\") if Min_Max_axis: ax.set_xlim3d(-1 * Min_Max_axis[\"z_max\"], -1 * Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"])", "= df_temp[df.columns.str.startswith(\"z\")].to_numpy().flatten() visibility = df_temp[df.columns.str.startswith(\"v\")].to_numpy().flatten() landmark_list.load_xyz(x, y, z, visibility) plot_landmarks( landmark_list, config.POSE_CONNECTIONS, counter=counter,", "_normalize_color(color): return tuple(v / 255.0 for v in color) def plot_landmarks( landmark_list, connections:", "Min_Max_axis[\"z_min\"]) ax.set_ylim3d(Min_Max_axis[\"x_min\"], Min_Max_axis[\"x_max\"]) ax.set_zlim3d(-1 * Min_Max_axis[\"y_max\"], -1 * Min_Max_axis[\"y_min\"]) ax.view_init(elev=elevation, azim=azimuth) plotted_landmarks =", "load_df(self, df): x = df.loc[:, df.columns.str.startswith(\"x\")].to_numpy().flatten() y = df.loc[:, df.columns.str.startswith(\"y\")].to_numpy().flatten() z = df.loc[:,", "azimuth: int = 10, ): \"\"\"Plot the landmarks and the connections in matplotlib", "index is out of range. Invalid connection \" f\"from landmark #{start_idx} to landmark", "connection_drawing_spec: A DrawingSpec object that specifies the connections' drawing settings such as color", "int = 2 # Circle radius. Default to 2 pixels. circle_radius: int =", "idx, landmark in enumerate(landmark_list.landmark_list): if landmark.visibility and landmark.visibility < _VISIBILITY_THRESHOLD: continue ax.scatter3D( xs=[-landmark.z]," ]
[ "Any, b: Callable[[], Tuple[int, int, str]], c: Set[str], d: Optional[Sequence[int]] = None, e:", "Tuple[int, int, str]], c: Set[str], d: Optional[Sequence[int]] = None, e: Any = None,", "None, e: Any = None, ) -> None: pass print(\"Hello world\") foo(a=1, b=lambda:", "int, str]], c: Set[str], d: Optional[Sequence[int]] = None, e: Any = None, )", "d: Optional[Sequence[int]] = None, e: Any = None, ) -> None: pass print(\"Hello", "a: Any, b: Callable[[], Tuple[int, int, str]], c: Set[str], d: Optional[Sequence[int]] = None,", "Callable, Optional, Sequence, Set, Tuple def foo( a: Any, b: Callable[[], Tuple[int, int,", "Optional, Sequence, Set, Tuple def foo( a: Any, b: Callable[[], Tuple[int, int, str]],", "Any = None, ) -> None: pass print(\"Hello world\") foo(a=1, b=lambda: (1, 2,", "Any, Callable, Optional, Sequence, Set, Tuple def foo( a: Any, b: Callable[[], Tuple[int,", "typing import Any, Callable, Optional, Sequence, Set, Tuple def foo( a: Any, b:", "Set[str], d: Optional[Sequence[int]] = None, e: Any = None, ) -> None: pass", "str]], c: Set[str], d: Optional[Sequence[int]] = None, e: Any = None, ) ->", ") -> None: pass print(\"Hello world\") foo(a=1, b=lambda: (1, 2, \"hoge\"), c=set(), d=None,", "c: Set[str], d: Optional[Sequence[int]] = None, e: Any = None, ) -> None:", "def foo( a: Any, b: Callable[[], Tuple[int, int, str]], c: Set[str], d: Optional[Sequence[int]]", "Sequence, Set, Tuple def foo( a: Any, b: Callable[[], Tuple[int, int, str]], c:", "None, ) -> None: pass print(\"Hello world\") foo(a=1, b=lambda: (1, 2, \"hoge\"), c=set(),", "= None, e: Any = None, ) -> None: pass print(\"Hello world\") foo(a=1,", "e: Any = None, ) -> None: pass print(\"Hello world\") foo(a=1, b=lambda: (1,", "Callable[[], Tuple[int, int, str]], c: Set[str], d: Optional[Sequence[int]] = None, e: Any =", "import Any, Callable, Optional, Sequence, Set, Tuple def foo( a: Any, b: Callable[[],", "Optional[Sequence[int]] = None, e: Any = None, ) -> None: pass print(\"Hello 
world\")", "b: Callable[[], Tuple[int, int, str]], c: Set[str], d: Optional[Sequence[int]] = None, e: Any", "Set, Tuple def foo( a: Any, b: Callable[[], Tuple[int, int, str]], c: Set[str],", "foo( a: Any, b: Callable[[], Tuple[int, int, str]], c: Set[str], d: Optional[Sequence[int]] =", "from typing import Any, Callable, Optional, Sequence, Set, Tuple def foo( a: Any,", "= None, ) -> None: pass print(\"Hello world\") foo(a=1, b=lambda: (1, 2, \"hoge\"),", "Tuple def foo( a: Any, b: Callable[[], Tuple[int, int, str]], c: Set[str], d:", "-> None: pass print(\"Hello world\") foo(a=1, b=lambda: (1, 2, \"hoge\"), c=set(), d=None, e=None)" ]
[ "TenSEALContext object\"\"\" return cls(data=data) @property def auto_mod_switch(self) -> bool: return self.data.auto_mod_switch @auto_mod_switch.setter def", "def is_public(self) -> bool: return self.data.is_public() def make_context_public( self, generate_galois_keys: bool = False,", "int = None, coeff_mod_bit_sizes: List[int] = [], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int", "ENCRYPTION_TYPE.SYMMETRIC. n_threads: define number of threads that shall be later used for parallel", "native_type): raise TypeError(f\"value must be of type {native_type}\") self._data = value def copy(self)", "self.data.is_private() def is_public(self) -> bool: return self.data.is_public() def make_context_public( self, generate_galois_keys: bool =", "shall be later used for parallel computation. data: A TenSEALContext to wrap. We", "Key(ABC): \"\"\"Wrapper class for encryption keys\"\"\" def __init__(self, data): self.data = data @property", "save_galois_keys: bool = True, save_relin_keys: bool = True, ) -> bytes: \"\"\"Serialize the", "plaintext modulus. Should not be passed when the scheme is CKKS. coeff_mod_bit_sizes: List", "= value @property def auto_relin(self) -> bool: return self.data.auto_relin @auto_relin.setter def auto_relin(self, value:", "either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of the polynomial modulus, must be", "save_secret_key, save_galois_keys, save_relin_keys ) @property def global_scale(self) -> float: return self.data.global_scale @global_scale.setter def", "TenSEALContext to wrap. We won't construct a new object if it's passed. Returns:", "PublicKey(self.data.public_key()) def is_private(self) -> bool: return self.data.is_private() def is_public(self) -> bool: return self.data.is_public()", "bool = False ): \"\"\"Drop secret part from the context. 
This is useful", "\"\"\"The Context manages everything related to the encrypted computation, including keys, which optimization", "prior to this call if not (isinstance(n_threads, int) and n_threads > 0): n_threads", "a context from a serialized buffer. Args: data : bytes buffer from the", "-> bool: return self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key:", "threads that shall be later used for parallel computation. data: A TenSEALContext to", "SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper", "= value @classmethod def _wrap( cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ],", "@auto_rescale.setter def auto_rescale(self, value: bool): self.data.auto_rescale = value def has_galois_keys(self) -> bool: return", "many threads should run for a parallel computation. 
\"\"\" import multiprocessing from enum", "the low level TenSEALContext object\"\"\" return cls(data=data) @property def auto_mod_switch(self) -> bool: return", "must be of type {native_type}\") self._data = value def copy(self) -> \"Context\": return", "return self._data @data.setter def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low level TenSEALContext", "Args: scheme : define the scheme to be used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS.", "self, scheme: SCHEME_TYPE = None, poly_modulus_degree: int = None, plain_modulus: int = None,", "for every key type, to differentiate between them only class SecretKey(Key): pass class", "n_threads, ) @property def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low level TenSEALContext", "import multiprocessing from enum import Enum from typing import List, Union from abc", "data: ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct a context that holds keys and parameters", "return self._data @data.setter def data( self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ],", "@auto_relin.setter def auto_relin(self, value: bool): self.data.auto_relin = value @property def auto_rescale(self) -> bool:", "GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey = None): if secret_key is None: self.data.generate_galois_keys() elif", "data is not None: self.data = data return # constructing a new object", "a context that holds keys and parameters needed for operating encrypted tensors using", "], ): \"\"\"Return a new key object wrapping the low level key object\"\"\"", "scheme == SCHEME_TYPE.BFV: if plain_modulus is None: raise ValueError(\"plain_modulus must be provided\") elif", "save_relin_keys ) @property def global_scale(self) -> float: return self.data.global_scale @global_scale.setter def global_scale(self, value:", "Context object wrapping the low level TenSEALContext object\"\"\" return 
cls(data=data) @property def auto_mod_switch(self)", "value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low level TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext if", "data : bytes buffer from the original context. n_threads: define number of threads", ") -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped low level", "size for each coeffecient modulus. Can be an empty list for BFV, a", "used to decrypt values. Args: generate_galois_keys: should we generate galois-keys before dropping the", "set prior to this call if not (isinstance(n_threads, int) and n_threads > 0):", "self._wrap(self.data.copy()) def __copy__(self) -> \"Context\": return self.copy() @classmethod def load(cls, data: bytes, n_threads:", "int, but the value doesn't matter for ckks plain_modulus = 0 else: raise", "Context object. \"\"\" # wrapping if data is not None: self.data = data", "save_public_key: bool = True, save_secret_key: bool = False, save_galois_keys: bool = True, save_relin_keys:", "pass class RelinKeys(Key): pass class Context: def __init__( self, scheme: SCHEME_TYPE = None,", "be used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. n_threads: define number of threads that shall", "TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_relin_keys(self) -> bool: return self.data.has_relin_keys() def relin_keys(self)", "PublicKey(Key): pass class GaloisKeys(Key): pass class RelinKeys(Key): pass class Context: def __init__( self,", "pass None here, everything should be set prior to this call if not", "be given. encryption_type : define the encryption type to be used, either ENCRYPTION_TYPE.ASYMMETRIC,", "parallel computation. data: A TenSEALContext to wrap. We won't construct a new object", "elif scheme == SCHEME_TYPE.CKKS: # must be int, but the value doesn't matter", "decrypt values. 
Args: generate_galois_keys: should we generate galois-keys before dropping the secret-key? generate_relin_keys:", "], ): \"\"\"Set the wrapped low level key object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__)", "use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We can't pass None here, everything should", "-> bool: return self.data.has_public_key() def public_key(self) -> PublicKey: return PublicKey(self.data.public_key()) def is_private(self) ->", "secret_key is None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)}", "ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct a context that holds keys and parameters needed", "def generate_relin_keys(self, secret_key: SecretKey = None): if secret_key is None: self.data.generate_relin_keys() elif isinstance(secret_key,", "including keys, which optimization should be enabled, and how many threads should run", "self, ) -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped low", "the wrapped low level key object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value,", "or ENCRYPTION_TYPE.SYMMETRIC. 
n_threads: define number of threads that shall be later used for", "either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We can't pass None here, everything should be", "value doesn't matter for ckks plain_modulus = 0 else: raise ValueError(\"Invalid scheme type,", "matter for ckks plain_modulus = 0 else: raise ValueError(\"Invalid scheme type, use either", "\"Context\": return self.copy() @classmethod def load(cls, data: bytes, n_threads: int = None) ->", "SCHEME_TYPE.CKKS\") # We can't pass None here, everything should be set prior to", "\"\"\"Serialize the context into a stream of bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key, save_galois_keys,", "= multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property", "secret_key: SecretKey = None): if secret_key is None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data)", "provided\") elif scheme == SCHEME_TYPE.CKKS: # must be int, but the value doesn't", "of type {native_type}\") self._data = value def copy(self) -> \"Context\": return self._wrap(self.data.copy()) def", "the wrapped low level key object\"\"\" return self._data @data.setter def data( self, value:", "a parallel computation. \"\"\" import multiprocessing from enum import Enum from typing import", "BFV, a default value will be given. encryption_type : define the encryption type", "def global_scale(self) -> float: return self.data.global_scale @global_scale.setter def global_scale(self, value: float): self.data.global_scale =", "value def has_galois_keys(self) -> bool: return self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys())", "of two. plain_modulus: The plaintext modulus. 
Should not be passed when the scheme", "self.data.has_relin_keys() def relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey = None):", "how many threads should run for a parallel computation. \"\"\" import multiprocessing from", "-> SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self) -> bool: return self.data.has_public_key() def public_key(self) ->", "a new object if scheme == SCHEME_TYPE.BFV: if plain_modulus is None: raise ValueError(\"plain_modulus", "for parallel computation. Returns: A Context object. \"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads))", "doesn't matter for ckks plain_modulus = 0 else: raise ValueError(\"Invalid scheme type, use", "empty list for BFV, a default value will be given. encryption_type : define", "self._data = value def copy(self) -> \"Context\": return self._wrap(self.data.copy()) def __copy__(self) -> \"Context\":", "SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We can't pass None here, everything should be set", "threads that shall be later used for parallel computation. Returns: A Context object.", "a serialized buffer. Args: data : bytes buffer from the original context. n_threads:", "{native_type}\") self._data = value @classmethod def _wrap( cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys,", "bool: return self.data.is_public() def make_context_public( self, generate_galois_keys: bool = False, generate_relin_keys: bool =", "before dropping the secret-key? generate_relin_keys: should we generate relin-keys before dropping the secret-key?", "n_threads > 0): n_threads = multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes,", "from the original context. 
n_threads: define number of threads that shall be later", "is_public(self) -> bool: return self.data.is_public() def make_context_public( self, generate_galois_keys: bool = False, generate_relin_keys:", "def has_relin_keys(self) -> bool: return self.data.has_relin_keys() def relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys()) def", "= False, generate_relin_keys: bool = False ): \"\"\"Drop secret part from the context.", "tensors using either BFV or CKKS scheme. Args: scheme : define the scheme", "save_secret_key: bool = False, save_galois_keys: bool = True, save_relin_keys: bool = True, )", "a new key object wrapping the low level key object\"\"\" return cls(data) #", "): \"\"\"Drop secret part from the context. This is useful before sending the", "\"\"\"Wrapper class for encryption keys\"\"\" def __init__(self, data): self.data = data @property def", "data( self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set the wrapped", "Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return a new key object wrapping", "that shall be later used for parallel computation. data: A TenSEALContext to wrap.", "This is useful before sending the context for remote computation, as we don't", "ts._ts_cpp.TenSEALContext if not isinstance(value, native_type): raise TypeError(f\"value must be of type {native_type}\") self._data", "scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get", "as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE", "not (isinstance(n_threads, int) and n_threads > 0): n_threads = multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new(", "a default value will be given. 
encryption_type : define the encryption type to", "if plain_modulus is None: raise ValueError(\"plain_modulus must be provided\") elif scheme == SCHEME_TYPE.CKKS:", "context. n_threads: define number of threads that shall be later used for parallel", "-> \"Context\": return self.copy() @classmethod def load(cls, data: bytes, n_threads: int = None)", "Enum from typing import List, Union from abc import ABC import tenseal as", "wrapped low level TenSEALContext object\"\"\" return self._data @data.setter def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set", "generate_galois_keys(self, secret_key: SecretKey = None): if secret_key is None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey):", "object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value, native_type): raise TypeError(f\"value must be", "@property def auto_mod_switch(self) -> bool: return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value: bool): self.data.auto_mod_switch", "low level key object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value, native_type): raise", "TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_secret_key(self) -> bool: return self.data.has_secret_key() def secret_key(self)", "everything should be set prior to this call if not (isinstance(n_threads, int) and", "-> \"Context\": return self._wrap(self.data.copy()) def __copy__(self) -> \"Context\": return self.copy() @classmethod def load(cls,", "def public_key(self) -> PublicKey: return PublicKey(self.data.public_key()) def is_private(self) -> bool: return self.data.is_private() def", "elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_relin_keys(self)", "TypeError(f\"value must be of type {native_type}\") self._data = value @classmethod def _wrap( cls,", 
"data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return a new key object", "wrapping the low level TenSEALContext object\"\"\" return cls(data=data) @property def auto_mod_switch(self) -> bool:", "TenSEALContext object\"\"\" return self._data @data.setter def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low", "the value doesn't matter for ckks plain_modulus = 0 else: raise ValueError(\"Invalid scheme", "(isinstance(n_threads, int) and n_threads > 0): n_threads = multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value,", "scheme. Args: scheme : define the scheme to be used, either SCHEME_TYPE.BFV or", "Can be an empty list for BFV, a default value will be given.", "class GaloisKeys(Key): pass class RelinKeys(Key): pass class Context: def __init__( self, scheme: SCHEME_TYPE", "of bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys ) @property def global_scale(self) ->", "def auto_mod_switch(self) -> bool: return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value: bool): self.data.auto_mod_switch =", "\"\"\"Construct a context that holds keys and parameters needed for operating encrypted tensors", "Context manages everything related to the encrypted computation, including keys, which optimization should", "def has_secret_key(self) -> bool: return self.data.has_secret_key() def secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key()) def", "coeff_mod_bit_sizes: List[int] = [], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None, data:", "def __init__( self, scheme: SCHEME_TYPE = None, poly_modulus_degree: int = None, plain_modulus: int", "to differentiate between them only class SecretKey(Key): pass class PublicKey(Key): pass class GaloisKeys(Key):", "be provided\") elif scheme == SCHEME_TYPE.CKKS: # must be int, but the 
value", "SecretKey = None): if secret_key is None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else:", "Args: data : bytes buffer from the original context. n_threads: define number of", "-> float: return self.data.global_scale @global_scale.setter def global_scale(self, value: float): self.data.global_scale = value @classmethod", "def __init__(self, data): self.data = data @property def data( self, ) -> Union[", "NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class", "holds keys and parameters needed for operating encrypted tensors using either BFV or", "that can be used to decrypt values. Args: generate_galois_keys: should we generate galois-keys", "encrypted tensors using either BFV or CKKS scheme. Args: scheme : define the", "be enabled, and how many threads should run for a parallel computation. \"\"\"", "ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low level TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext if not", "@classmethod def _wrap( cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return", "return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value: bool): self.data.auto_rescale = value def has_galois_keys(self) ->", "RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey = None): if secret_key is None: self.data.generate_relin_keys() elif", "int = None, data: ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct a context that holds", "has_galois_keys(self) -> bool: return self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self,", "value: bool): self.data.auto_mod_switch = value @property def auto_relin(self) -> bool: return self.data.auto_relin @auto_relin.setter", 
"SecretKey(self.data.secret_key()) def has_public_key(self) -> bool: return self.data.has_public_key() def public_key(self) -> PublicKey: return PublicKey(self.data.public_key())", "define number of threads that shall be later used for parallel computation. data:", "generate_relin_keys: should we generate relin-keys before dropping the secret-key? \"\"\" self.data.make_context_public( generate_galois_keys=generate_galois_keys, generate_relin_keys=generate_relin_keys", "data( self, ) -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped", "parameters needed for operating encrypted tensors using either BFV or CKKS scheme. Args:", "bool: return self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey", "encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None, data: ts._ts_cpp.TenSEALContext = None, ):", "not isinstance(value, native_type): raise TypeError(f\"value must be of type {native_type}\") self._data = value", "int = None, plain_modulus: int = None, coeff_mod_bit_sizes: List[int] = [], encryption_type: ENCRYPTION_TYPE", "n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key: bool = True, save_secret_key: bool =", "= None): if secret_key is None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise", "Should not be passed when the scheme is CKKS. coeff_mod_bit_sizes: List of bit", "@data.setter def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low level TenSEALContext object\"\"\" native_type", "if it's passed. Returns: A Context object. \"\"\" # wrapping if data is", "ValueError(\"plain_modulus must be provided\") elif scheme == SCHEME_TYPE.CKKS: # must be int, but", "given. 
encryption_type : define the encryption type to be used, either ENCRYPTION_TYPE.ASYMMETRIC, or", "keys, which optimization should be enabled, and how many threads should run for", "a power of two. plain_modulus: The plaintext modulus. Should not be passed when", "bool: return self.data.has_public_key() def public_key(self) -> PublicKey: return PublicKey(self.data.public_key()) def is_private(self) -> bool:", "ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None, data: ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct", "object\"\"\" native_type = ts._ts_cpp.TenSEALContext if not isinstance(value, native_type): raise TypeError(f\"value must be of", "encrypted computation, including keys, which optimization should be enabled, and how many threads", "is useful before sending the context for remote computation, as we don't want", "auto_relin(self) -> bool: return self.data.auto_relin @auto_relin.setter def auto_relin(self, value: bool): self.data.auto_relin = value", "ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return a new key object wrapping the", "relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey = None): if secret_key", "is None: raise ValueError(\"plain_modulus must be provided\") elif scheme == SCHEME_TYPE.CKKS: # must", "-> bool: return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value: bool): self.data.auto_mod_switch = value @property", "= None, ): \"\"\"Construct a context that holds keys and parameters needed for", "level TenSEALContext object\"\"\" return self._data @data.setter def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped", "> 0): n_threads = multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value,", "= True, ) -> bytes: \"\"\"Serialize the 
context into a stream of bytes.\"\"\"", "should be enabled, and how many threads should run for a parallel computation.", "this call if not (isinstance(n_threads, int) and n_threads > 0): n_threads = multiprocessing.cpu_count()", "None) -> \"Context\": \"\"\"Construct a context from a serialized buffer. Args: data :", "@property def global_scale(self) -> float: return self.data.global_scale @global_scale.setter def global_scale(self, value: float): self.data.global_scale", "def secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self) -> bool: return self.data.has_public_key() def", "getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value, native_type): raise TypeError(f\"value must be of type {native_type}\")", "be later used for parallel computation. Returns: A Context object. \"\"\" if n_threads:", "= False, save_galois_keys: bool = True, save_relin_keys: bool = True, ) -> bytes:", "not be passed when the scheme is CKKS. coeff_mod_bit_sizes: List of bit size", "\"\"\"Get the wrapped low level TenSEALContext object\"\"\" return self._data @data.setter def data(self, value:", "should be set prior to this call if not (isinstance(n_threads, int) and n_threads", "class RelinKeys(Key): pass class Context: def __init__( self, scheme: SCHEME_TYPE = None, poly_modulus_degree:", "serialized buffer. Args: data : bytes buffer from the original context. n_threads: define", "parallel computation. Returns: A Context object. \"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return", "def _wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a new Context object wrapping the", "= data @property def data( self, ) -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys", "the encryption type to be used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. n_threads: define number", "new object if it's passed. 
Returns: A Context object. \"\"\" # wrapping if", "self.data.auto_relin = value @property def auto_rescale(self) -> bool: return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self,", "from a serialized buffer. Args: data : bytes buffer from the original context.", "self.data.has_public_key() def public_key(self) -> PublicKey: return PublicKey(self.data.public_key()) def is_private(self) -> bool: return self.data.is_private()", "degree of the polynomial modulus, must be a power of two. plain_modulus: The", "scheme : define the scheme to be used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree:", "self._data = value @classmethod def _wrap( cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys", "new Context object wrapping the low level TenSEALContext object\"\"\" return cls(data=data) @property def", "manages everything related to the encrypted computation, including keys, which optimization should be", "or SCHEME_TYPE.CKKS\") # We can't pass None here, everything should be set prior", "def auto_rescale(self) -> bool: return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value: bool): self.data.auto_rescale =", "can be used to decrypt values. Args: generate_galois_keys: should we generate galois-keys before", "should we generate galois-keys before dropping the secret-key? generate_relin_keys: should we generate relin-keys", "for ckks plain_modulus = 0 else: raise ValueError(\"Invalid scheme type, use either SCHEME_TYPE.BFV", "ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low level TenSEALContext object\"\"\" return self._data @data.setter def data(self,", "we don't want to send the secret-key that can be used to decrypt", "= None, data: ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct a context that holds keys", "the original context. 
n_threads: define number of threads that shall be later used", "= [], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None, data: ts._ts_cpp.TenSEALContext =", "object. \"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key:", "= value def has_galois_keys(self) -> bool: return self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys: return", "either BFV or CKKS scheme. Args: scheme : define the scheme to be", "scheme type, use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We can't pass None here,", "def data( self, ) -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the", "TypeError(f\"value must be of type {native_type}\") self._data = value def copy(self) -> \"Context\":", "ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return a new key object wrapping the low", "Union from abc import ABC import tenseal as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC =", "plain_modulus: int = None, coeff_mod_bit_sizes: List[int] = [], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads:", "every key type, to differentiate between them only class SecretKey(Key): pass class PublicKey(Key):", "return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value: bool): self.data.auto_mod_switch = value @property def auto_relin(self)", "return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey = None): if secret_key is None: self.data.generate_galois_keys()", "multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property def", "We have a class for every key type, to differentiate between them only", "for BFV, a 
default value will be given. encryption_type : define the encryption", "low level key object\"\"\" return self._data @data.setter def data( self, value: Union[ ts._ts_cpp.PublicKey,", "A Context object. \"\"\" # wrapping if data is not None: self.data =", "keys\"\"\" def __init__(self, data): self.data = data @property def data( self, ) ->", "ts._ts_cpp.RelinKeys ], ): \"\"\"Return a new key object wrapping the low level key", "RelinKeys(Key): pass class Context: def __init__( self, scheme: SCHEME_TYPE = None, poly_modulus_degree: int", "-> bool: return self.data.has_relin_keys() def relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key:", "for a parallel computation. \"\"\" import multiprocessing from enum import Enum from typing", "from typing import List, Union from abc import ABC import tenseal as ts", "-> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped low level key", "multiprocessing from enum import Enum from typing import List, Union from abc import", "bool = True, ) -> bytes: \"\"\"Serialize the context into a stream of", "True, save_relin_keys: bool = True, ) -> bytes: \"\"\"Serialize the context into a", "from the context. 
This is useful before sending the context for remote computation,", "= ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class for encryption keys\"\"\" def __init__(self, data): self.data", "None: raise ValueError(\"plain_modulus must be provided\") elif scheme == SCHEME_TYPE.CKKS: # must be", "= value @classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a new Context", "def is_private(self) -> bool: return self.data.is_private() def is_public(self) -> bool: return self.data.is_public() def", "= value def copy(self) -> \"Context\": return self._wrap(self.data.copy()) def __copy__(self) -> \"Context\": return", "n_threads: int = None) -> \"Context\": \"\"\"Construct a context from a serialized buffer.", "be passed when the scheme is CKKS. coeff_mod_bit_sizes: List of bit size for", "bytes: \"\"\"Serialize the context into a stream of bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key,", "threads should run for a parallel computation. 
\"\"\" import multiprocessing from enum import", "import Enum from typing import List, Union from abc import ABC import tenseal", "self.__class__.__name__) if not isinstance(value, native_type): raise TypeError(f\"value must be of type {native_type}\") self._data", "return cls(data) # We have a class for every key type, to differentiate", "isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_relin_keys(self) ->", "SecretKey = None): if secret_key is None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else:", "key object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value, native_type): raise TypeError(f\"value must", "native_type = getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value, native_type): raise TypeError(f\"value must be of", "self.data.has_secret_key() def secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self) -> bool: return self.data.has_public_key()", "context. This is useful before sending the context for remote computation, as we", "bool): self.data.auto_rescale = value def has_galois_keys(self) -> bool: return self.data.has_galois_keys() def galois_keys(self) ->", "encryption_type.value, n_threads, ) @property def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low level", "return self._wrap(self.data.copy()) def __copy__(self) -> \"Context\": return self.copy() @classmethod def load(cls, data: bytes,", "save_relin_keys: bool = True, ) -> bytes: \"\"\"Serialize the context into a stream", "bool = False, generate_relin_keys: bool = False ): \"\"\"Drop secret part from the", "self, generate_galois_keys: bool = False, generate_relin_keys: bool = False ): \"\"\"Drop secret part", "the secret-key that can be used to decrypt values. 
Args: generate_galois_keys: should we", "[], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None, data: ts._ts_cpp.TenSEALContext = None,", "class Context: def __init__( self, scheme: SCHEME_TYPE = None, poly_modulus_degree: int = None,", "return SecretKey(self.data.secret_key()) def has_public_key(self) -> bool: return self.data.has_public_key() def public_key(self) -> PublicKey: return", "data return # constructing a new object if scheme == SCHEME_TYPE.BFV: if plain_modulus", "): \"\"\"Return a new key object wrapping the low level key object\"\"\" return", "value def copy(self) -> \"Context\": return self._wrap(self.data.copy()) def __copy__(self) -> \"Context\": return self.copy()", "SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_secret_key(self) -> bool:", "the secret-key? generate_relin_keys: should we generate relin-keys before dropping the secret-key? \"\"\" self.data.make_context_public(", "def make_context_public( self, generate_galois_keys: bool = False, generate_relin_keys: bool = False ): \"\"\"Drop", "return self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys ) @property def global_scale(self) -> float: return", "ABC import tenseal as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC", "return self.data.has_relin_keys() def relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey =", "return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key: bool = True, save_secret_key: bool = False,", "object\"\"\" return self._data @data.setter def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low level", "generate_galois_keys: bool = False, generate_relin_keys: bool = False ): \"\"\"Drop 
secret part from", "ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property def data(self) -> ts._ts_cpp.TenSEALContext:", "useful before sending the context for remote computation, as we don't want to", "is not None: self.data = data return # constructing a new object if", "-> \"Context\": \"\"\"Construct a context from a serialized buffer. Args: data : bytes", "ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS", "ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped low level key object\"\"\" return self._data @data.setter def", "self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys ) @property def global_scale(self) -> float: return self.data.global_scale", "@auto_mod_switch.setter def auto_mod_switch(self, value: bool): self.data.auto_mod_switch = value @property def auto_relin(self) -> bool:", "poly_modulus_degree: The degree of the polynomial modulus, must be a power of two.", "level TenSEALContext object\"\"\" return cls(data=data) @property def auto_mod_switch(self) -> bool: return self.data.auto_mod_switch @auto_mod_switch.setter", "-> bool: return self.data.auto_relin @auto_relin.setter def auto_relin(self, value: bool): self.data.auto_relin = value @property", "List[int] = [], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None, data: ts._ts_cpp.TenSEALContext", "List of bit size for each coeffecient modulus. 
Can be an empty list", "= ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV", "isinstance(value, native_type): raise TypeError(f\"value must be of type {native_type}\") self._data = value def", "value @property def auto_relin(self) -> bool: return self.data.auto_relin @auto_relin.setter def auto_relin(self, value: bool):", "type, use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We can't pass None here, everything", "n_threads: int = None, data: ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct a context that", "# wrapping if data is not None: self.data = data return # constructing", "but the value doesn't matter for ckks plain_modulus = 0 else: raise ValueError(\"Invalid", "False ): \"\"\"Drop secret part from the context. This is useful before sending", "None here, everything should be set prior to this call if not (isinstance(n_threads,", "generate_galois_keys: should we generate galois-keys before dropping the secret-key? 
generate_relin_keys: should we generate", "generate_relin_keys(self, secret_key: SecretKey = None): if secret_key is None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey):", "== SCHEME_TYPE.CKKS: # must be int, but the value doesn't matter for ckks", "None): if secret_key is None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect", "self.copy() @classmethod def load(cls, data: bytes, n_threads: int = None) -> \"Context\": \"\"\"Construct", "_wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a new Context object wrapping the low", "self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_secret_key(self) -> bool: return", ": define the scheme to be used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The", "used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of the polynomial modulus, must", "ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped low level key object\"\"\" return self._data", "scheme to be used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of the", "PublicKey: return PublicKey(self.data.public_key()) def is_private(self) -> bool: return self.data.is_private() def is_public(self) -> bool:", "define the scheme to be used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree", "Returns: A Context object. \"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def", "be used to decrypt values. 
Args: generate_galois_keys: should we generate galois-keys before dropping", "global_scale(self) -> float: return self.data.global_scale @global_scale.setter def global_scale(self, value: float): self.data.global_scale = value", "type: {type(secret_key)} != SecretKey\") def has_relin_keys(self) -> bool: return self.data.has_relin_keys() def relin_keys(self) ->", "<gh_stars>0 \"\"\"The Context manages everything related to the encrypted computation, including keys, which", "= ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class for", "@data.setter def data( self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set", "return # constructing a new object if scheme == SCHEME_TYPE.BFV: if plain_modulus is", "{type(secret_key)} != SecretKey\") def has_relin_keys(self) -> bool: return self.data.has_relin_keys() def relin_keys(self) -> RelinKeys:", "key object wrapping the low level key object\"\"\" return cls(data) # We have", "secret-key? generate_relin_keys: should we generate relin-keys before dropping the secret-key? \"\"\" self.data.make_context_public( generate_galois_keys=generate_galois_keys,", "wrapped low level key object\"\"\" return self._data @data.setter def data( self, value: Union[", "global_scale(self, value: float): self.data.global_scale = value @classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\":", "sending the context for remote computation, as we don't want to send the", "is CKKS. coeff_mod_bit_sizes: List of bit size for each coeffecient modulus. Can be", "using either BFV or CKKS scheme. Args: scheme : define the scheme to", "!= SecretKey\") def has_relin_keys(self) -> bool: return self.data.has_relin_keys() def relin_keys(self) -> RelinKeys: return", "enabled, and how many threads should run for a parallel computation. 
\"\"\" import", "-> bool: return self.data.is_public() def make_context_public( self, generate_galois_keys: bool = False, generate_relin_keys: bool", "-> \"Context\": \"\"\"Return a new Context object wrapping the low level TenSEALContext object\"\"\"", "def auto_mod_switch(self, value: bool): self.data.auto_mod_switch = value @property def auto_relin(self) -> bool: return", "self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def", "ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class", "{native_type}\") self._data = value def copy(self) -> \"Context\": return self._wrap(self.data.copy()) def __copy__(self) ->", "False, save_galois_keys: bool = True, save_relin_keys: bool = True, ) -> bytes: \"\"\"Serialize", "type, to differentiate between them only class SecretKey(Key): pass class PublicKey(Key): pass class", "encryption keys\"\"\" def __init__(self, data): self.data = data @property def data( self, )", "The degree of the polynomial modulus, must be a power of two. plain_modulus:", "object wrapping the low level TenSEALContext object\"\"\" return cls(data=data) @property def auto_mod_switch(self) ->", "@classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a new Context object wrapping", "enum import Enum from typing import List, Union from abc import ABC import", "_wrap( cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return a new", "values. Args: generate_galois_keys: should we generate galois-keys before dropping the secret-key? 
generate_relin_keys: should", "Context: def __init__( self, scheme: SCHEME_TYPE = None, poly_modulus_degree: int = None, plain_modulus:", "n_threads = multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, )", "keys and parameters needed for operating encrypted tensors using either BFV or CKKS", "original context. n_threads: define number of threads that shall be later used for", "the context. This is useful before sending the context for remote computation, as", "@property def auto_rescale(self) -> bool: return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value: bool): self.data.auto_rescale", "to be used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. n_threads: define number of threads that", "raise TypeError(f\"value must be of type {native_type}\") self._data = value @classmethod def _wrap(", "low level TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext if not isinstance(value, native_type): raise TypeError(f\"value", "coeffecient modulus. Can be an empty list for BFV, a default value will", "ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. n_threads: define number of threads that shall be later used", "): \"\"\"Set the wrapped low level key object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__) if", "Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set the wrapped low level key", "two. plain_modulus: The plaintext modulus. 
Should not be passed when the scheme is", "return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey = None): if secret_key is None: self.data.generate_relin_keys()", "for encryption keys\"\"\" def __init__(self, data): self.data = data @property def data( self,", "bool: return self.data.has_relin_keys() def relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey", "ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set the wrapped low level key object\"\"\"", "data: A TenSEALContext to wrap. We won't construct a new object if it's", "ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class for encryption", "a new Context object wrapping the low level TenSEALContext object\"\"\" return cls(data=data) @property", "\"Context\": return self._wrap(self.data.copy()) def __copy__(self) -> \"Context\": return self.copy() @classmethod def load(cls, data:", "False, generate_relin_keys: bool = False ): \"\"\"Drop secret part from the context. This", "scheme: SCHEME_TYPE = None, poly_modulus_degree: int = None, plain_modulus: int = None, coeff_mod_bit_sizes:", "level TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext if not isinstance(value, native_type): raise TypeError(f\"value must", "object\"\"\" return cls(data=data) @property def auto_mod_switch(self) -> bool: return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self,", "level key object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value, native_type): raise TypeError(f\"value", "We won't construct a new object if it's passed. Returns: A Context object.", "plain_modulus: The plaintext modulus. Should not be passed when the scheme is CKKS.", "of threads that shall be later used for parallel computation. 
data: A TenSEALContext", "self.data.global_scale = value @classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a new", "type to be used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. n_threads: define number of threads", "-> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low level TenSEALContext object\"\"\" return self._data @data.setter def", "poly_modulus_degree: int = None, plain_modulus: int = None, coeff_mod_bit_sizes: List[int] = [], encryption_type:", "SecretKey\") def has_relin_keys(self) -> bool: return self.data.has_relin_keys() def relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys())", "if data is not None: self.data = data return # constructing a new", "from abc import ABC import tenseal as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC", "pass class PublicKey(Key): pass class GaloisKeys(Key): pass class RelinKeys(Key): pass class Context: def", "list for BFV, a default value will be given. encryption_type : define the", "number of threads that shall be later used for parallel computation. 
Returns: A", "return self.data.auto_relin @auto_relin.setter def auto_relin(self, value: bool): self.data.auto_relin = value @property def auto_rescale(self)", "value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set the wrapped low level", "self.data.auto_relin @auto_relin.setter def auto_relin(self, value: bool): self.data.auto_relin = value @property def auto_rescale(self) ->", "return self.data.has_secret_key() def secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self) -> bool: return", "n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key: bool = True,", "True, ) -> bytes: \"\"\"Serialize the context into a stream of bytes.\"\"\" return", "raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_relin_keys(self) -> bool: return self.data.has_relin_keys() def", "= True, save_secret_key: bool = False, save_galois_keys: bool = True, save_relin_keys: bool =", "level key object\"\"\" return self._data @data.setter def data( self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey,", "Context object. 
\"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self,", "= True, save_relin_keys: bool = True, ) -> bytes: \"\"\"Serialize the context into", "is None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} !=", "# We can't pass None here, everything should be set prior to this", "def galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey = None): if", "everything related to the encrypted computation, including keys, which optimization should be enabled,", "must be of type {native_type}\") self._data = value @classmethod def _wrap( cls, data:", "computation. \"\"\" import multiprocessing from enum import Enum from typing import List, Union", "Returns: A Context object. \"\"\" # wrapping if data is not None: self.data", "# constructing a new object if scheme == SCHEME_TYPE.BFV: if plain_modulus is None:", "self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value: bool): self.data.auto_mod_switch = value @property def auto_relin(self) ->", "def has_public_key(self) -> bool: return self.data.has_public_key() def public_key(self) -> PublicKey: return PublicKey(self.data.public_key()) def", "wrapped low level key object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value, native_type):", "must be a power of two. plain_modulus: The plaintext modulus. Should not be", "to be used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of the polynomial", "bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys ) @property def global_scale(self) -> float:", "bytes buffer from the original context. 
n_threads: define number of threads that shall", "return self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey =", "when the scheme is CKKS. coeff_mod_bit_sizes: List of bit size for each coeffecient", "from enum import Enum from typing import List, Union from abc import ABC", "has_secret_key(self) -> bool: return self.data.has_secret_key() def secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self)", "self._data @data.setter def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low level TenSEALContext object\"\"\"", "not None: self.data = data return # constructing a new object if scheme", "data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low level TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext", "CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class for encryption keys\"\"\" def __init__(self, data):", "return self.data.has_public_key() def public_key(self) -> PublicKey: return PublicKey(self.data.public_key()) def is_private(self) -> bool: return", "used for parallel computation. data: A TenSEALContext to wrap. We won't construct a", "self.data = data @property def data( self, ) -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys,", "float): self.data.global_scale = value @classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a", "value: float): self.data.global_scale = value @classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return", "= ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class for encryption keys\"\"\" def", "be used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of the polynomial modulus,", "to wrap. We won't construct a new object if it's passed. 
Returns: A", "self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def", "only class SecretKey(Key): pass class PublicKey(Key): pass class GaloisKeys(Key): pass class RelinKeys(Key): pass", "will be given. encryption_type : define the encryption type to be used, either", "raise TypeError(f\"value must be of type {native_type}\") self._data = value def copy(self) ->", "class Key(ABC): \"\"\"Wrapper class for encryption keys\"\"\" def __init__(self, data): self.data = data", "computation. data: A TenSEALContext to wrap. We won't construct a new object if", "scheme is CKKS. coeff_mod_bit_sizes: List of bit size for each coeffecient modulus. Can", "ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set the wrapped low level key object\"\"\" native_type =", "and n_threads > 0): n_threads = multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus,", "define number of threads that shall be later used for parallel computation. Returns:", "dropping the secret-key? generate_relin_keys: should we generate relin-keys before dropping the secret-key? 
\"\"\"", "data @property def data( self, ) -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]:", "if scheme == SCHEME_TYPE.BFV: if plain_modulus is None: raise ValueError(\"plain_modulus must be provided\")", "if not (isinstance(n_threads, int) and n_threads > 0): n_threads = multiprocessing.cpu_count() self.data =", "ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class for encryption keys\"\"\" def __init__(self, data): self.data =", "self._data @data.setter def data( self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ):", "isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_secret_key(self) ->", "SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of the polynomial modulus, must be a", "value will be given. encryption_type : define the encryption type to be used,", "= 0 else: raise ValueError(\"Invalid scheme type, use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") #", "-> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey = None): if secret_key is", "the context for remote computation, as we don't want to send the secret-key", "the polynomial modulus, must be a power of two. 
plain_modulus: The plaintext modulus.", "plain_modulus is None: raise ValueError(\"plain_modulus must be provided\") elif scheme == SCHEME_TYPE.CKKS: #", "-> bool: return self.data.is_private() def is_public(self) -> bool: return self.data.is_public() def make_context_public( self,", "bytes, n_threads: int = None) -> \"Context\": \"\"\"Construct a context from a serialized", "def global_scale(self, value: float): self.data.global_scale = value @classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext) ->", "must be provided\") elif scheme == SCHEME_TYPE.CKKS: # must be int, but the", "data: bytes, n_threads: int = None) -> \"Context\": \"\"\"Construct a context from a", "0): n_threads = multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads,", "@global_scale.setter def global_scale(self, value: float): self.data.global_scale = value @classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext)", "context from a serialized buffer. Args: data : bytes buffer from the original", "\"\"\" import multiprocessing from enum import Enum from typing import List, Union from", "def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low level TenSEALContext object\"\"\" return self._data", "# must be int, but the value doesn't matter for ckks plain_modulus =", "value @classmethod def _wrap(cls, data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a new Context object", "isinstance(value, native_type): raise TypeError(f\"value must be of type {native_type}\") self._data = value @classmethod", "should run for a parallel computation. 
\"\"\" import multiprocessing from enum import Enum", "self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_relin_keys(self) -> bool: return", "new key object wrapping the low level key object\"\"\" return cls(data) # We", "object if scheme == SCHEME_TYPE.BFV: if plain_modulus is None: raise ValueError(\"plain_modulus must be", "auto_mod_switch(self, value: bool): self.data.auto_mod_switch = value @property def auto_relin(self) -> bool: return self.data.auto_relin", "optimization should be enabled, and how many threads should run for a parallel", "public_key(self) -> PublicKey: return PublicKey(self.data.public_key()) def is_private(self) -> bool: return self.data.is_private() def is_public(self)", "SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_relin_keys(self) -> bool:", "return PublicKey(self.data.public_key()) def is_private(self) -> bool: return self.data.is_private() def is_public(self) -> bool: return", "data): self.data = data @property def data( self, ) -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey,", "generate_relin_keys: bool = False ): \"\"\"Drop secret part from the context. This is", "def serialize( self, save_public_key: bool = True, save_secret_key: bool = False, save_galois_keys: bool", "= ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None, data: ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct a", "\"\"\"Construct a context from a serialized buffer. Args: data : bytes buffer from", "type {native_type}\") self._data = value def copy(self) -> \"Context\": return self._wrap(self.data.copy()) def __copy__(self)", "modulus. Should not be passed when the scheme is CKKS. 
coeff_mod_bit_sizes: List of", "self.data.global_scale @global_scale.setter def global_scale(self, value: float): self.data.global_scale = value @classmethod def _wrap(cls, data:", "\"\"\"Set the wrapped low level key object\"\"\" native_type = getattr(ts._ts_cpp, self.__class__.__name__) if not", "that shall be later used for parallel computation. Returns: A Context object. \"\"\"", "bool = False, save_galois_keys: bool = True, save_relin_keys: bool = True, ) ->", "related to the encrypted computation, including keys, which optimization should be enabled, and", "\"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key: bool", "self.data.auto_mod_switch = value @property def auto_relin(self) -> bool: return self.data.auto_relin @auto_relin.setter def auto_relin(self,", "self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set the wrapped low", "later used for parallel computation. Returns: A Context object. \"\"\" if n_threads: return", "self.data = data return # constructing a new object if scheme == SCHEME_TYPE.BFV:", "class for encryption keys\"\"\" def __init__(self, data): self.data = data @property def data(", "of the polynomial modulus, must be a power of two. 
plain_modulus: The plaintext", "-> bool: return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value: bool): self.data.auto_rescale = value def", "wrapping if data is not None: self.data = data return # constructing a", "which optimization should be enabled, and how many threads should run for a", "None, data: ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct a context that holds keys and", "object\"\"\" return cls(data) # We have a class for every key type, to", "value: bool): self.data.auto_relin = value @property def auto_rescale(self) -> bool: return self.data.auto_rescale @auto_rescale.setter", "encryption_type : define the encryption type to be used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC.", "is_private(self) -> bool: return self.data.is_private() def is_public(self) -> bool: return self.data.is_public() def make_context_public(", "part from the context. This is useful before sending the context for remote", "is None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} !=", "ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV", "-> RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey = None): if secret_key is", "!= SecretKey\") def has_secret_key(self) -> bool: return self.data.has_secret_key() def secret_key(self) -> SecretKey: return", "remote computation, as we don't want to send the secret-key that can be", "def _wrap( cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return a", "key type, to differentiate between them only class SecretKey(Key): pass class PublicKey(Key): pass", "want to send the secret-key that can be used to decrypt 
values. Args:", "class SecretKey(Key): pass class PublicKey(Key): pass class GaloisKeys(Key): pass class RelinKeys(Key): pass class", "coeff_mod_bit_sizes: List of bit size for each coeffecient modulus. Can be an empty", "bool: return self.data.auto_relin @auto_relin.setter def auto_relin(self, value: bool): self.data.auto_relin = value @property def", "import ABC import tenseal as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC =", "modulus. Can be an empty list for BFV, a default value will be", "be of type {native_type}\") self._data = value @classmethod def _wrap( cls, data: Union[", "\"Context\": \"\"\"Construct a context from a serialized buffer. Args: data : bytes buffer", "save_public_key, save_secret_key, save_galois_keys, save_relin_keys ) @property def global_scale(self) -> float: return self.data.global_scale @global_scale.setter", "bool: return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value: bool): self.data.auto_mod_switch = value @property def", "constructing a new object if scheme == SCHEME_TYPE.BFV: if plain_modulus is None: raise", "secret_key: SecretKey = None): if secret_key is None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data)", "make_context_public( self, generate_galois_keys: bool = False, generate_relin_keys: bool = False ): \"\"\"Drop secret", "type {native_type}\") self._data = value @classmethod def _wrap( cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey,", "ckks plain_modulus = 0 else: raise ValueError(\"Invalid scheme type, use either SCHEME_TYPE.BFV or", "-> PublicKey: return PublicKey(self.data.public_key()) def is_private(self) -> bool: return self.data.is_private() def is_public(self) ->", "be of type {native_type}\") self._data = value def copy(self) -> \"Context\": return self._wrap(self.data.copy())", "a new object if it's passed. Returns: A Context object. 
\"\"\" # wrapping", "def relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey = None): if", "= None, coeff_mod_bit_sizes: List[int] = [], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int =", "self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey = None):", "GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey = None): if secret_key is None:", "the scheme to be used, either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of", "won't construct a new object if it's passed. Returns: A Context object. \"\"\"", "= ts._ts_cpp.TenSEALContext if not isinstance(value, native_type): raise TypeError(f\"value must be of type {native_type}\")", "the encrypted computation, including keys, which optimization should be enabled, and how many", "of threads that shall be later used for parallel computation. Returns: A Context", "wrapping the low level key object\"\"\" return cls(data) # We have a class", "plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped", "= getattr(ts._ts_cpp, self.__class__.__name__) if not isinstance(value, native_type): raise TypeError(f\"value must be of type", "return cls(data=data) @property def auto_mod_switch(self) -> bool: return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value:", "a class for every key type, to differentiate between them only class SecretKey(Key):", "value: bool): self.data.auto_rescale = value def has_galois_keys(self) -> bool: return self.data.has_galois_keys() def galois_keys(self)", "= None) -> \"Context\": \"\"\"Construct a context from a serialized buffer. 
Args: data", "bool): self.data.auto_mod_switch = value @property def auto_relin(self) -> bool: return self.data.auto_relin @auto_relin.setter def", "bool: return self.data.has_secret_key() def secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self) -> bool:", "return self.data.is_public() def make_context_public( self, generate_galois_keys: bool = False, generate_relin_keys: bool = False", "RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self, secret_key: SecretKey = None): if secret_key is None:", "): \"\"\"Construct a context that holds keys and parameters needed for operating encrypted", "for operating encrypted tensors using either BFV or CKKS scheme. Args: scheme :", "secret_key is None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)}", "run for a parallel computation. \"\"\" import multiprocessing from enum import Enum from", "= None, poly_modulus_degree: int = None, plain_modulus: int = None, coeff_mod_bit_sizes: List[int] =", "None, coeff_mod_bit_sizes: List[int] = [], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None,", "bool = True, save_relin_keys: bool = True, ) -> bytes: \"\"\"Serialize the context", "ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped low level key object\"\"\" return self._data @data.setter", "SecretKey\") def has_secret_key(self) -> bool: return self.data.has_secret_key() def secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key())", "None, poly_modulus_degree: int = None, plain_modulus: int = None, coeff_mod_bit_sizes: List[int] = [],", "that holds keys and parameters needed for operating encrypted tensors using either BFV", "n_threads: define number of threads that shall be later used for parallel computation.", "tenseal as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC = 
ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum):", "class ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE", "the low level key object\"\"\" return cls(data) # We have a class for", "has_public_key(self) -> bool: return self.data.has_public_key() def public_key(self) -> PublicKey: return PublicKey(self.data.public_key()) def is_private(self)", "for remote computation, as we don't want to send the secret-key that can", "SCHEME_TYPE.CKKS: # must be int, but the value doesn't matter for ckks plain_modulus", "CKKS scheme. Args: scheme : define the scheme to be used, either SCHEME_TYPE.BFV", "ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE =", "parallel computation. \"\"\" import multiprocessing from enum import Enum from typing import List,", "return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key: bool = True, save_secret_key:", "construct a new object if it's passed. Returns: A Context object. \"\"\" #", "]: \"\"\"Get the wrapped low level key object\"\"\" return self._data @data.setter def data(", "and parameters needed for operating encrypted tensors using either BFV or CKKS scheme.", "send the secret-key that can be used to decrypt values. Args: generate_galois_keys: should", ": bytes buffer from the original context. n_threads: define number of threads that", "either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. 
n_threads: define number of threads that shall be later", "float: return self.data.global_scale @global_scale.setter def global_scale(self, value: float): self.data.global_scale = value @classmethod def", "to the encrypted computation, including keys, which optimization should be enabled, and how", "ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set the wrapped low level key object\"\"\" native_type", "wrapped low level TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext if not isinstance(value, native_type): raise", "= value @property def auto_rescale(self) -> bool: return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value:", "cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key: bool = True, save_secret_key: bool = False, save_galois_keys:", "buffer. Args: data : bytes buffer from the original context. n_threads: define number", "be an empty list for BFV, a default value will be given. 
encryption_type", "\"Context\": \"\"\"Return a new Context object wrapping the low level TenSEALContext object\"\"\" return", "ValueError(\"Invalid scheme type, use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We can't pass None", "True, save_secret_key: bool = False, save_galois_keys: bool = True, save_relin_keys: bool = True,", "raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_secret_key(self) -> bool: return self.data.has_secret_key() def", "pass class Context: def __init__( self, scheme: SCHEME_TYPE = None, poly_modulus_degree: int =", "0 else: raise ValueError(\"Invalid scheme type, use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We", "object\"\"\" return self._data @data.setter def data( self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys", "if not isinstance(value, native_type): raise TypeError(f\"value must be of type {native_type}\") self._data =", "have a class for every key type, to differentiate between them only class", "class PublicKey(Key): pass class GaloisKeys(Key): pass class RelinKeys(Key): pass class Context: def __init__(", "each coeffecient modulus. 
Can be an empty list for BFV, a default value", ") @property def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low level TenSEALContext object\"\"\"", "bool = True, save_secret_key: bool = False, save_galois_keys: bool = True, save_relin_keys: bool", "as we don't want to send the secret-key that can be used to", "computation, as we don't want to send the secret-key that can be used", "ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped low level key object\"\"\" return", "SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self) -> bool: return self.data.has_public_key() def public_key(self) -> PublicKey:", "be set prior to this call if not (isinstance(n_threads, int) and n_threads >", "import tenseal as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class", "the context into a stream of bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys", "raise ValueError(\"plain_modulus must be provided\") elif scheme == SCHEME_TYPE.CKKS: # must be int,", "galois-keys before dropping the secret-key? generate_relin_keys: should we generate relin-keys before dropping the", "an empty list for BFV, a default value will be given. 
encryption_type :", "coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low", "{type(secret_key)} != SecretKey\") def has_secret_key(self) -> bool: return self.data.has_secret_key() def secret_key(self) -> SecretKey:", "cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key: bool = True, save_secret_key: bool", "if secret_key is None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type:", "data: ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a new Context object wrapping the low level", "__init__( self, scheme: SCHEME_TYPE = None, poly_modulus_degree: int = None, plain_modulus: int =", "stream of bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys ) @property def global_scale(self)", "typing import List, Union from abc import ABC import tenseal as ts class", "bool: return self.data.is_private() def is_public(self) -> bool: return self.data.is_public() def make_context_public( self, generate_galois_keys:", "@property def data( self, ) -> Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get", "ts._ts_cpp.RelinKeys ], ): \"\"\"Set the wrapped low level key object\"\"\" native_type = getattr(ts._ts_cpp,", "= None, plain_modulus: int = None, coeff_mod_bit_sizes: List[int] = [], encryption_type: ENCRYPTION_TYPE =", "return self.data.is_private() def is_public(self) -> bool: return self.data.is_public() def make_context_public( self, generate_galois_keys: bool", "class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC):", "passed. Returns: A Context object. 
\"\"\" # wrapping if data is not None:", "self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property def data(self)", "SCHEME_TYPE = None, poly_modulus_degree: int = None, plain_modulus: int = None, coeff_mod_bit_sizes: List[int]", "class for every key type, to differentiate between them only class SecretKey(Key): pass", "modulus, must be a power of two. plain_modulus: The plaintext modulus. Should not", "A Context object. \"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize(", "context for remote computation, as we don't want to send the secret-key that", "int = None) -> \"Context\": \"\"\"Construct a context from a serialized buffer. Args:", "None: self.data = data return # constructing a new object if scheme ==", "object. \"\"\" # wrapping if data is not None: self.data = data return", "computation. Returns: A Context object. \"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data))", "serialize( self, save_public_key: bool = True, save_secret_key: bool = False, save_galois_keys: bool =", "secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self) -> bool: return self.data.has_public_key() def public_key(self)", "object if it's passed. Returns: A Context object. 
\"\"\" # wrapping if data", "data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low level TenSEALContext object\"\"\" return self._data @data.setter", "None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\")", "\"\"\"Set the wrapped low level TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext if not isinstance(value,", "= False ): \"\"\"Drop secret part from the context. This is useful before", "for parallel computation. data: A TenSEALContext to wrap. We won't construct a new", "else: raise ValueError(\"Invalid scheme type, use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We can't", "the wrapped low level TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext if not isinstance(value, native_type):", "\"\"\"Return a new key object wrapping the low level key object\"\"\" return cls(data)", "# We have a class for every key type, to differentiate between them", "the scheme is CKKS. 
coeff_mod_bit_sizes: List of bit size for each coeffecient modulus.", "def __copy__(self) -> \"Context\": return self.copy() @classmethod def load(cls, data: bytes, n_threads: int", "to this call if not (isinstance(n_threads, int) and n_threads > 0): n_threads =", "def auto_rescale(self, value: bool): self.data.auto_rescale = value def has_galois_keys(self) -> bool: return self.data.has_galois_keys()", "None, plain_modulus: int = None, coeff_mod_bit_sizes: List[int] = [], encryption_type: ENCRYPTION_TYPE = ENCRYPTION_TYPE.ASYMMETRIC,", "cls(data=data) @property def auto_mod_switch(self) -> bool: return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value: bool):", "= ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS", "return self.data.global_scale @global_scale.setter def global_scale(self, value: float): self.data.global_scale = value @classmethod def _wrap(cls,", "bit size for each coeffecient modulus. Can be an empty list for BFV,", "def auto_relin(self, value: bool): self.data.auto_relin = value @property def auto_rescale(self) -> bool: return", "auto_rescale(self, value: bool): self.data.auto_rescale = value def has_galois_keys(self) -> bool: return self.data.has_galois_keys() def", "type: {type(secret_key)} != SecretKey\") def has_secret_key(self) -> bool: return self.data.has_secret_key() def secret_key(self) ->", "SecretKey(Key): pass class PublicKey(Key): pass class GaloisKeys(Key): pass class RelinKeys(Key): pass class Context:", "def load(cls, data: bytes, n_threads: int = None) -> \"Context\": \"\"\"Construct a context", "native_type = ts._ts_cpp.TenSEALContext if not isinstance(value, native_type): raise TypeError(f\"value must be of type", "BFV or CKKS scheme. 
Args: scheme : define the scheme to be used,", "between them only class SecretKey(Key): pass class PublicKey(Key): pass class GaloisKeys(Key): pass class", "__copy__(self) -> \"Context\": return self.copy() @classmethod def load(cls, data: bytes, n_threads: int =", "new object if scheme == SCHEME_TYPE.BFV: if plain_modulus is None: raise ValueError(\"plain_modulus must", "if secret_key is None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type:", "later used for parallel computation. data: A TenSEALContext to wrap. We won't construct", "import List, Union from abc import ABC import tenseal as ts class ENCRYPTION_TYPE(Enum):", "generate galois-keys before dropping the secret-key? generate_relin_keys: should we generate relin-keys before dropping", "self.data.is_public() def make_context_public( self, generate_galois_keys: bool = False, generate_relin_keys: bool = False ):", "must be int, but the value doesn't matter for ckks plain_modulus = 0", ": define the encryption type to be used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. n_threads:", "def copy(self) -> \"Context\": return self._wrap(self.data.copy()) def __copy__(self) -> \"Context\": return self.copy() @classmethod", "call if not (isinstance(n_threads, int) and n_threads > 0): n_threads = multiprocessing.cpu_count() self.data", "polynomial modulus, must be a power of two. plain_modulus: The plaintext modulus. Should", "the wrapped low level TenSEALContext object\"\"\" return self._data @data.setter def data(self, value: ts._ts_cpp.TenSEALContext):", "pass class GaloisKeys(Key): pass class RelinKeys(Key): pass class Context: def __init__( self, scheme:", "GaloisKeys(Key): pass class RelinKeys(Key): pass class Context: def __init__( self, scheme: SCHEME_TYPE =", "encryption type to be used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. 
n_threads: define number of", "else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_relin_keys(self) -> bool: return self.data.has_relin_keys()", "ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV =", "copy(self) -> \"Context\": return self._wrap(self.data.copy()) def __copy__(self) -> \"Context\": return self.copy() @classmethod def", "auto_relin(self, value: bool): self.data.auto_relin = value @property def auto_rescale(self) -> bool: return self.data.auto_rescale", "bool): self.data.auto_relin = value @property def auto_rescale(self) -> bool: return self.data.auto_rescale @auto_rescale.setter def", "of type {native_type}\") self._data = value @classmethod def _wrap( cls, data: Union[ ts._ts_cpp.PublicKey,", "or CKKS scheme. Args: scheme : define the scheme to be used, either", "key object\"\"\" return cls(data) # We have a class for every key type,", "secret-key that can be used to decrypt values. Args: generate_galois_keys: should we generate", "value @classmethod def _wrap( cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ):", "should we generate relin-keys before dropping the secret-key? 
\"\"\" self.data.make_context_public( generate_galois_keys=generate_galois_keys, generate_relin_keys=generate_relin_keys )", "SCHEME_TYPE.BFV: if plain_modulus is None: raise ValueError(\"plain_modulus must be provided\") elif scheme ==", "context that holds keys and parameters needed for operating encrypted tensors using either", "them only class SecretKey(Key): pass class PublicKey(Key): pass class GaloisKeys(Key): pass class RelinKeys(Key):", "= None): if secret_key is None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise", "cls, data: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return a new key", "cls(data) # We have a class for every key type, to differentiate between", "low level key object\"\"\" return cls(data) # We have a class for every", "return self.copy() @classmethod def load(cls, data: bytes, n_threads: int = None) -> \"Context\":", "be int, but the value doesn't matter for ckks plain_modulus = 0 else:", "level key object\"\"\" return cls(data) # We have a class for every key", "poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the", "= ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree, plain_modulus, coeff_mod_bit_sizes, encryption_type.value, n_threads, ) @property def data(self) ->", "@property def data(self) -> ts._ts_cpp.TenSEALContext: \"\"\"Get the wrapped low level TenSEALContext object\"\"\" return", "\"\"\" # wrapping if data is not None: self.data = data return #", "elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_secret_key(self)", "-> bytes: \"\"\"Serialize the context into a stream of bytes.\"\"\" return self.data.serialize( save_public_key,", 
"\"\"\"Drop secret part from the context. This is useful before sending the context", "don't want to send the secret-key that can be used to decrypt values.", "__init__(self, data): self.data = data @property def data( self, ) -> Union[ ts._ts_cpp.PublicKey,", "ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Return a new key object wrapping the low level", "we generate galois-keys before dropping the secret-key? generate_relin_keys: should we generate relin-keys before", "def generate_galois_keys(self, secret_key: SecretKey = None): if secret_key is None: self.data.generate_galois_keys() elif isinstance(secret_key,", "\"\"\"Return a new Context object wrapping the low level TenSEALContext object\"\"\" return cls(data=data)", "wrap. We won't construct a new object if it's passed. Returns: A Context", "value @property def auto_rescale(self) -> bool: return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value: bool):", "def auto_relin(self) -> bool: return self.data.auto_relin @auto_relin.setter def auto_relin(self, value: bool): self.data.auto_relin =", "ts._ts_cpp.TenSEALContext) -> \"Context\": \"\"\"Return a new Context object wrapping the low level TenSEALContext", "used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. n_threads: define number of threads that shall be", "it's passed. Returns: A Context object. \"\"\" # wrapping if data is not", "== SCHEME_TYPE.BFV: if plain_modulus is None: raise ValueError(\"plain_modulus must be provided\") elif scheme", "SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of the polynomial modulus, must be a power of", "Args: generate_galois_keys: should we generate galois-keys before dropping the secret-key? 
generate_relin_keys: should we", "low level TenSEALContext object\"\"\" return self._data @data.setter def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the", "galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def generate_galois_keys(self, secret_key: SecretKey = None): if secret_key", "a stream of bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys ) @property def", "BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class for encryption keys\"\"\"", "plain_modulus = 0 else: raise ValueError(\"Invalid scheme type, use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\")", "auto_mod_switch(self) -> bool: return self.data.auto_mod_switch @auto_mod_switch.setter def auto_mod_switch(self, value: bool): self.data.auto_mod_switch = value", "to decrypt values. Args: generate_galois_keys: should we generate galois-keys before dropping the secret-key?", "Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ]: \"\"\"Get the wrapped low level key object\"\"\"", "A TenSEALContext to wrap. We won't construct a new object if it's passed.", "-> bool: return self.data.has_secret_key() def secret_key(self) -> SecretKey: return SecretKey(self.data.secret_key()) def has_public_key(self) ->", "SYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.SYMMETRIC class SCHEME_TYPE(Enum): NONE = ts._ts_cpp.SCHEME_TYPE.NONE BFV = ts._ts_cpp.SCHEME_TYPE.BFV CKKS =", "\"\"\"Get the wrapped low level key object\"\"\" return self._data @data.setter def data( self,", "default value will be given. encryption_type : define the encryption type to be", "before sending the context for remote computation, as we don't want to send", "for each coeffecient modulus. 
Can be an empty list for BFV, a default", ") @property def global_scale(self) -> float: return self.data.global_scale @global_scale.setter def global_scale(self, value: float):", "to send the secret-key that can be used to decrypt values. Args: generate_galois_keys:", "scheme == SCHEME_TYPE.CKKS: # must be int, but the value doesn't matter for", "of bit size for each coeffecient modulus. Can be an empty list for", "ts._ts_cpp.SCHEME_TYPE.BFV CKKS = ts._ts_cpp.SCHEME_TYPE.CKKS class Key(ABC): \"\"\"Wrapper class for encryption keys\"\"\" def __init__(self,", "shall be later used for parallel computation. Returns: A Context object. \"\"\" if", "differentiate between them only class SecretKey(Key): pass class PublicKey(Key): pass class GaloisKeys(Key): pass", "key object\"\"\" return self._data @data.setter def data( self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys,", "abc import ABC import tenseal as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC = ts._ts_cpp.ENCRYPTION_TYPE.ASYMMETRIC SYMMETRIC", "List, Union from abc import ABC import tenseal as ts class ENCRYPTION_TYPE(Enum): ASYMMETRIC", "None): if secret_key is None: self.data.generate_galois_keys() elif isinstance(secret_key, SecretKey): self.data.generate_galois_keys(secret_key.data) else: raise TypeError(f\"incorrect", "has_relin_keys(self) -> bool: return self.data.has_relin_keys() def relin_keys(self) -> RelinKeys: return RelinKeys(self.data.relin_keys()) def generate_relin_keys(self,", "self, save_public_key: bool = True, save_secret_key: bool = False, save_galois_keys: bool = True,", "@property def auto_relin(self) -> bool: return self.data.auto_relin @auto_relin.setter def auto_relin(self, value: bool): self.data.auto_relin", "CKKS. coeff_mod_bit_sizes: List of bit size for each coeffecient modulus. Can be an", "The plaintext modulus. Should not be passed when the scheme is CKKS. 
coeff_mod_bit_sizes:", "ENCRYPTION_TYPE.ASYMMETRIC, n_threads: int = None, data: ts._ts_cpp.TenSEALContext = None, ): \"\"\"Construct a context", "TenSEALContext object\"\"\" native_type = ts._ts_cpp.TenSEALContext if not isinstance(value, native_type): raise TypeError(f\"value must be", "power of two. plain_modulus: The plaintext modulus. Should not be passed when the", "def data(self, value: ts._ts_cpp.TenSEALContext): \"\"\"Set the wrapped low level TenSEALContext object\"\"\" native_type =", "can't pass None here, everything should be set prior to this call if", "be a power of two. plain_modulus: The plaintext modulus. Should not be passed", "self.data.auto_rescale = value def has_galois_keys(self) -> bool: return self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys:", "None, ): \"\"\"Construct a context that holds keys and parameters needed for operating", "be later used for parallel computation. data: A TenSEALContext to wrap. We won't", "@classmethod def load(cls, data: bytes, n_threads: int = None) -> \"Context\": \"\"\"Construct a", ") -> bytes: \"\"\"Serialize the context into a stream of bytes.\"\"\" return self.data.serialize(", "bool: return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value: bool): self.data.auto_rescale = value def has_galois_keys(self)", "def data( self, value: Union[ ts._ts_cpp.PublicKey, ts._ts_cpp.SecretKey, ts._ts_cpp.GaloisKeys, ts._ts_cpp.RelinKeys ], ): \"\"\"Set the", "or SCHEME_TYPE.CKKS. poly_modulus_degree: The degree of the polynomial modulus, must be a power", "operating encrypted tensors using either BFV or CKKS scheme. 
Args: scheme : define", "object wrapping the low level key object\"\"\" return cls(data) # We have a", "int) and n_threads > 0): n_threads = multiprocessing.cpu_count() self.data = ts._ts_cpp.TenSEALContext.new( scheme.value, poly_modulus_degree,", "raise ValueError(\"Invalid scheme type, use either SCHEME_TYPE.BFV or SCHEME_TYPE.CKKS\") # We can't pass", "None: self.data.generate_relin_keys() elif isinstance(secret_key, SecretKey): self.data.generate_relin_keys(secret_key.data) else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\")", "if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data, n_threads)) return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data)) def serialize( self, save_public_key: bool =", "def has_galois_keys(self) -> bool: return self.data.has_galois_keys() def galois_keys(self) -> GaloisKeys: return GaloisKeys(self.data.galois_keys()) def", "used for parallel computation. Returns: A Context object. \"\"\" if n_threads: return cls._wrap(ts._ts_cpp.TenSEALContext.deserialize(data,", "else: raise TypeError(f\"incorrect type: {type(secret_key)} != SecretKey\") def has_secret_key(self) -> bool: return self.data.has_secret_key()", "self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value: bool): self.data.auto_rescale = value def has_galois_keys(self) -> bool:", "native_type): raise TypeError(f\"value must be of type {native_type}\") self._data = value @classmethod def", "context into a stream of bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys )", "buffer from the original context. n_threads: define number of threads that shall be", "secret part from the context. 
This is useful before sending the context for", "We can't pass None here, everything should be set prior to this call", "save_galois_keys, save_relin_keys ) @property def global_scale(self) -> float: return self.data.global_scale @global_scale.setter def global_scale(self,", "into a stream of bytes.\"\"\" return self.data.serialize( save_public_key, save_secret_key, save_galois_keys, save_relin_keys ) @property", "define the encryption type to be used, either ENCRYPTION_TYPE.ASYMMETRIC, or ENCRYPTION_TYPE.SYMMETRIC. n_threads: define", "= data return # constructing a new object if scheme == SCHEME_TYPE.BFV: if", "passed when the scheme is CKKS. coeff_mod_bit_sizes: List of bit size for each", "here, everything should be set prior to this call if not (isinstance(n_threads, int)", "computation, including keys, which optimization should be enabled, and how many threads should", "low level TenSEALContext object\"\"\" return cls(data=data) @property def auto_mod_switch(self) -> bool: return self.data.auto_mod_switch", "needed for operating encrypted tensors using either BFV or CKKS scheme. Args: scheme", "number of threads that shall be later used for parallel computation. data: A", "and how many threads should run for a parallel computation. \"\"\" import multiprocessing", "auto_rescale(self) -> bool: return self.data.auto_rescale @auto_rescale.setter def auto_rescale(self, value: bool): self.data.auto_rescale = value", "load(cls, data: bytes, n_threads: int = None) -> \"Context\": \"\"\"Construct a context from" ]
[ "setUp(self): \"\"\"Test case setup, building de instances that are needes in the test", "(self): post = Post.objects.filter(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post =", "Profile.objects.create( user= self.user, latitude= '1.23', longitude= '1.22' ) self.post = Post.objects.create( user= self.user,", "self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser = Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self): post =", "from AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\" def setUp(self): \"\"\"Test", "= Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user) def test_search_post_for_user(self): user", "needes in the test case.\"\"\" self.user = User.objects.create( username= 'saenzavs', email= '<EMAIL>', password=", "test_search_users_home (self): post = Post.objects.filter(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post", "def test_persist_user(self): listUser = User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser = Profile.objects.all() self.assertEqual(len(listUser),", "} self.url = '/posts/' def test_persist_user(self): listUser = User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self):", "'<PASSWORD>' } self.url = '/posts/' def test_persist_user(self): listUser = User.objects.all() self.assertEqual(len(listUser), 1) def", "photo='string', title='Stunami en la casa de saenz', description='se les creció el rio calarca',", "longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home (self): post = Post.objects.filter(latitude = 3.2, longitude= 3.4)", "building de instances that are needes in the test 
case.\"\"\" self.user = User.objects.create(", "from rest_framework.test import APITestCase #Model from AmbieNet.users.models import User, Profile from AmbieNet.posts.models import", "self.post = Post.objects.create( user= self.user, profile = self.profile, photo='string', title='Stunami en la casa", "def test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user) def test_search_post_for_user(self): user = User.objects.get(username='saenzavs') post =", "'steven', last_name= 'saenz', ) self.profile = Profile.objects.create( user= self.user, latitude= '1.23', longitude= '1.22'", "AmbieNet.users.models import User, Profile from AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase): \"\"\"User login test", "self.assertEqual(len(listUser), 1) def test_persistence_post(self): post = Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post =", "test_filter_post_for_ubication(self): post = Post.objects.filter (latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home (self):", "AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\" def setUp(self): \"\"\"Test case", "status from rest_framework.test import APITestCase #Model from AmbieNet.users.models import User, Profile from AmbieNet.posts.models", "import TestCase from django.urls import reverse, path # Django REST Framework from rest_framework", "Django REST Framework from rest_framework import status from rest_framework.test import APITestCase #Model from", "Framework from rest_framework import status from rest_framework.test import APITestCase #Model from AmbieNet.users.models import", "= 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home (self): post = Post.objects.filter(latitude = 3.2,", "post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def 
test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user) def test_search_post_for_user(self):", "self.url = '/posts/' def test_persist_user(self): listUser = User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser", "= self.profile, photo='string', title='Stunami en la casa de saenz', description='se les creció el", "email= '<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232', first_name= 'steven', last_name= 'saenz', ) self.profile =", "latitude='3.2', longitude= '3.4' ) self.data = { 'username' : 'saenzavs', 'password' : '<PASSWORD>'", "import status from rest_framework.test import APITestCase #Model from AmbieNet.users.models import User, Profile from", "longitude= 3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self): user", "self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self): user = User.objects.filter(username='saenzavs')", "3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home (self): post = Post.objects.filter(latitude = 3.2, longitude=", "# Django REST Framework from rest_framework import status from rest_framework.test import APITestCase #Model", "el rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' ) self.data = { 'username' :", "3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self): user =", "1) def test_persistence_post(self): post = Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post = 
Post.objects.filter", "setup, building de instances that are needes in the test case.\"\"\" self.user =", "username= 'saenzavs', email= '<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232', first_name= 'steven', last_name= 'saenz', )", "saenz', description='se les creció el rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' ) self.data", "post = Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post = Post.objects.filter (latitude = 3.2,", "django.urls import reverse, path # Django REST Framework from rest_framework import status from", "Post class LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\" def setUp(self): \"\"\"Test case setup, building", "\"\"\"Test case setup, building de instances that are needes in the test case.\"\"\"", "instances that are needes in the test case.\"\"\" self.user = User.objects.create( username= 'saenzavs',", ": '<PASSWORD>' } self.url = '/posts/' def test_persist_user(self): listUser = User.objects.all() self.assertEqual(len(listUser), 1)", "rest_framework.test import APITestCase #Model from AmbieNet.users.models import User, Profile from AmbieNet.posts.models import Post", "login test case.\"\"\" def setUp(self): \"\"\"Test case setup, building de instances that are", "from django.test import TestCase from django.urls import reverse, path # Django REST Framework", "test_persist_user(self): listUser = User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser = Profile.objects.all() self.assertEqual(len(listUser), 1)", "<filename>AmbieNet/users/tests/test_login.py #django from django.test import TestCase from django.urls import reverse, path # Django", "Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user) def test_search_post_for_user(self): user =", "REST 
Framework from rest_framework import status from rest_framework.test import APITestCase #Model from AmbieNet.users.models", "{ 'username' : 'saenzavs', 'password' : '<PASSWORD>' } self.url = '/posts/' def test_persist_user(self):", "'<PASSWORD>', phone_number= '31212231232', first_name= 'steven', last_name= 'saenz', ) self.profile = Profile.objects.create( user= self.user,", "User, Profile from AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\" def", "'3.4' ) self.data = { 'username' : 'saenzavs', 'password' : '<PASSWORD>' } self.url", "'saenzavs', 'password' : '<PASSWORD>' } self.url = '/posts/' def test_persist_user(self): listUser = User.objects.all()", "are needes in the test case.\"\"\" self.user = User.objects.create( username= 'saenzavs', email= '<EMAIL>',", "APITestCase #Model from AmbieNet.users.models import User, Profile from AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase):", "test case.\"\"\" self.user = User.objects.create( username= 'saenzavs', email= '<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232',", "case.\"\"\" self.user = User.objects.create( username= 'saenzavs', email= '<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232', first_name=", "path # Django REST Framework from rest_framework import status from rest_framework.test import APITestCase", "Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self): post = Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post", "self.data = { 'username' : 'saenzavs', 'password' : '<PASSWORD>' } self.url = '/posts/'", "import User, Profile from AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\"", "class LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\" def setUp(self): \"\"\"Test case setup, building de", "the test case.\"\"\" self.user = 
User.objects.create( username= 'saenzavs', email= '<EMAIL>', password= '<PASSWORD>', phone_number=", "'31212231232', first_name= 'steven', last_name= 'saenz', ) self.profile = Profile.objects.create( user= self.user, latitude= '1.23',", "listUser = User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser = Profile.objects.all() self.assertEqual(len(listUser), 1) def", "def test_persistence_post(self): post = Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post = Post.objects.filter (latitude", "from AmbieNet.users.models import User, Profile from AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase): \"\"\"User login", "#Model from AmbieNet.users.models import User, Profile from AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase): \"\"\"User", "self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post = Post.objects.filter (latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def", "Post.objects.create( user= self.user, profile = self.profile, photo='string', title='Stunami en la casa de saenz',", "creció el rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' ) self.data = { 'username'", "de instances that are needes in the test case.\"\"\" self.user = User.objects.create( username=", "test_persistence_post(self): post = Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post = Post.objects.filter (latitude =", "def test_filter_post_for_ubication(self): post = Post.objects.filter (latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home", "= Post.objects.create( user= self.user, profile = self.profile, photo='string', title='Stunami en la casa de", "user= self.user, profile = self.profile, photo='string', title='Stunami en la casa de saenz', description='se", "= '/posts/' def test_persist_user(self): listUser 
= User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser =", "= Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self): post = Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self):", "= Post.objects.filter(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto')", "type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' ) self.data = { 'username' : 'saenzavs', 'password' :", "import APITestCase #Model from AmbieNet.users.models import User, Profile from AmbieNet.posts.models import Post class", "casa de saenz', description='se les creció el rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4'", "self.user, latitude= '1.23', longitude= '1.22' ) self.post = Post.objects.create( user= self.user, profile =", "User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser = Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self): post", "longitude= '1.22' ) self.post = Post.objects.create( user= self.user, profile = self.profile, photo='string', title='Stunami", "3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self):", "self.profile = Profile.objects.create( user= self.user, latitude= '1.23', longitude= '1.22' ) self.post = Post.objects.create(", "Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post = Post.objects.filter (latitude = 3.2, longitude= 3.4)", "phone_number= '31212231232', first_name= 'steven', last_name= 'saenz', ) self.profile = Profile.objects.create( user= self.user, latitude=", "test_persist_profile(self): 
listUser = Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self): post = Post.objects.filter(user= self.user) self.assertIsNotNone(post)", "def setUp(self): \"\"\"Test case setup, building de instances that are needes in the", "(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home (self): post = Post.objects.filter(latitude =", "les creció el rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' ) self.data = {", "= User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser = Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self):", "def test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user)", "django.test import TestCase from django.urls import reverse, path # Django REST Framework from", "User.objects.create( username= 'saenzavs', email= '<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232', first_name= 'steven', last_name= 'saenz',", "1) def test_persist_profile(self): listUser = Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self): post = Post.objects.filter(user=", "rest_framework import status from rest_framework.test import APITestCase #Model from AmbieNet.users.models import User, Profile", "la casa de saenz', description='se les creció el rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude=", "'username' : 'saenzavs', 'password' : '<PASSWORD>' } self.url = '/posts/' def test_persist_user(self): listUser", "post = Post.objects.filter(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post = Post.objects.filter", "= 3.2, longitude= 3.4) self.assertIsNotNone(post) def 
test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def", "post = Post.objects.filter (latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home (self): post", "'saenzavs', email= '<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232', first_name= 'steven', last_name= 'saenz', ) self.profile", "password= '<PASSWORD>', phone_number= '31212231232', first_name= 'steven', last_name= 'saenz', ) self.profile = Profile.objects.create( user=", "from django.urls import reverse, path # Django REST Framework from rest_framework import status", "Post.objects.filter (latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home (self): post = Post.objects.filter(latitude", "'saenz', ) self.profile = Profile.objects.create( user= self.user, latitude= '1.23', longitude= '1.22' ) self.post", "from rest_framework import status from rest_framework.test import APITestCase #Model from AmbieNet.users.models import User,", "'password' : '<PASSWORD>' } self.url = '/posts/' def test_persist_user(self): listUser = User.objects.all() self.assertEqual(len(listUser),", "LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\" def setUp(self): \"\"\"Test case setup, building de instances", "description='se les creció el rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' ) self.data =", "(type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user) def test_search_post_for_user(self): user = User.objects.get(username='saenzavs')", "first_name= 'steven', last_name= 'saenz', ) self.profile = Profile.objects.create( user= self.user, latitude= '1.23', longitude=", "self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post = Post.objects.filter (latitude = 3.2, longitude= 3.4) 
self.assertIsNotNone(post)", "Profile from AmbieNet.posts.models import Post class LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\" def setUp(self):", "self.user = User.objects.create( username= 'saenzavs', email= '<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232', first_name= 'steven',", "rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' ) self.data = { 'username' : 'saenzavs',", "def test_persist_profile(self): listUser = Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self): post = Post.objects.filter(user= self.user)", "= Post.objects.filter(user= self.user) self.assertIsNotNone(post) def test_filter_post_for_ubication(self): post = Post.objects.filter (latitude = 3.2, longitude=", "self.user, profile = self.profile, photo='string', title='Stunami en la casa de saenz', description='se les", "\"\"\"User login test case.\"\"\" def setUp(self): \"\"\"Test case setup, building de instances that", "test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post) def test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user) def", "= Profile.objects.create( user= self.user, latitude= '1.23', longitude= '1.22' ) self.post = Post.objects.create( user=", "self.assertIsNotNone(post) def test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user) def test_search_post_for_user(self): user = User.objects.get(username='saenzavs') post", "case.\"\"\" def setUp(self): \"\"\"Test case setup, building de instances that are needes in", "self.assertIsNotNone(post) def test_search_users_home (self): post = Post.objects.filter(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def", "'1.23', longitude= '1.22' ) self.post = Post.objects.create( user= self.user, profile = self.profile, photo='string',", ") self.post = 
Post.objects.create( user= self.user, profile = self.profile, photo='string', title='Stunami en la", "import reverse, path # Django REST Framework from rest_framework import status from rest_framework.test", "calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' ) self.data = { 'username' : 'saenzavs', 'password'", "= User.objects.create( username= 'saenzavs', email= '<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232', first_name= 'steven', last_name=", "self.profile, photo='string', title='Stunami en la casa de saenz', description='se les creció el rio", "title='Stunami en la casa de saenz', description='se les creció el rio calarca', type_catastrophe='maremoto',", "listUser = Profile.objects.all() self.assertEqual(len(listUser), 1) def test_persistence_post(self): post = Post.objects.filter(user= self.user) self.assertIsNotNone(post) def", "= Post.objects.filter (latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_users_home (self): post =", "'<EMAIL>', password= '<PASSWORD>', phone_number= '31212231232', first_name= 'steven', last_name= 'saenz', ) self.profile = Profile.objects.create(", "Post.objects.filter(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self): post = Post.objects.filter (type_catastrophe='maremoto') self.assertIsNotNone(post)", "that are needes in the test case.\"\"\" self.user = User.objects.create( username= 'saenzavs', email=", "= { 'username' : 'saenzavs', 'password' : '<PASSWORD>' } self.url = '/posts/' def", "en la casa de saenz', description='se les creció el rio calarca', type_catastrophe='maremoto', latitude='3.2',", "def test_search_users_home (self): post = Post.objects.filter(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post) def test_search_post_for_type_catastrophe(self):", "de saenz', description='se les creció el rio calarca', type_catastrophe='maremoto', latitude='3.2', longitude= '3.4' )", "in the test case.\"\"\" 
self.user = User.objects.create( username= 'saenzavs', email= '<EMAIL>', password= '<PASSWORD>',", "longitude= '3.4' ) self.data = { 'username' : 'saenzavs', 'password' : '<PASSWORD>' }", "reverse, path # Django REST Framework from rest_framework import status from rest_framework.test import", "#django from django.test import TestCase from django.urls import reverse, path # Django REST", "'1.22' ) self.post = Post.objects.create( user= self.user, profile = self.profile, photo='string', title='Stunami en", ") self.data = { 'username' : 'saenzavs', 'password' : '<PASSWORD>' } self.url =", "case setup, building de instances that are needes in the test case.\"\"\" self.user", ") self.profile = Profile.objects.create( user= self.user, latitude= '1.23', longitude= '1.22' ) self.post =", "TestCase from django.urls import reverse, path # Django REST Framework from rest_framework import", "user= self.user, latitude= '1.23', longitude= '1.22' ) self.post = Post.objects.create( user= self.user, profile", "'/posts/' def test_persist_user(self): listUser = User.objects.all() self.assertEqual(len(listUser), 1) def test_persist_profile(self): listUser = Profile.objects.all()", "test case.\"\"\" def setUp(self): \"\"\"Test case setup, building de instances that are needes", "import Post class LoginAPITestCase(APITestCase): \"\"\"User login test case.\"\"\" def setUp(self): \"\"\"Test case setup,", "profile = self.profile, photo='string', title='Stunami en la casa de saenz', description='se les creció", ": 'saenzavs', 'password' : '<PASSWORD>' } self.url = '/posts/' def test_persist_user(self): listUser =", "test_search_user_for_username(self): user = User.objects.filter(username='saenzavs') self.assertIsNotNone(user) def test_search_post_for_user(self): user = User.objects.get(username='saenzavs') post = Post.objects.get(user=user)", "last_name= 'saenz', ) self.profile = Profile.objects.create( user= self.user, latitude= '1.23', longitude= '1.22' )", "3.4) 
self.assertIsNotNone(post) def test_search_users_home (self): post = Post.objects.filter(latitude = 3.2, longitude= 3.4) self.assertIsNotNone(post)", "latitude= '1.23', longitude= '1.22' ) self.post = Post.objects.create( user= self.user, profile = self.profile," ]
[ "tag_pool class LoadDummyTag(BBCodeTag): name = 'loaddummy01' definition_string = '[loaddummy01]{TEXT}[/loaddummy01]' format_string = '<loaddummy>{TEXT}</loaddummy>' tag_pool.register_tag(LoadDummyTag)", "BBCodeTag from precise_bbcode.tag_pool import tag_pool class LoadDummyTag(BBCodeTag): name = 'loaddummy01' definition_string = '[loaddummy01]{TEXT}[/loaddummy01]'", "from precise_bbcode.bbcode.tag import BBCodeTag from precise_bbcode.tag_pool import tag_pool class LoadDummyTag(BBCodeTag): name = 'loaddummy01'", "precise_bbcode.tag_pool import tag_pool class LoadDummyTag(BBCodeTag): name = 'loaddummy01' definition_string = '[loaddummy01]{TEXT}[/loaddummy01]' format_string =", "import BBCodeTag from precise_bbcode.tag_pool import tag_pool class LoadDummyTag(BBCodeTag): name = 'loaddummy01' definition_string =", "precise_bbcode.bbcode.tag import BBCodeTag from precise_bbcode.tag_pool import tag_pool class LoadDummyTag(BBCodeTag): name = 'loaddummy01' definition_string", "import tag_pool class LoadDummyTag(BBCodeTag): name = 'loaddummy01' definition_string = '[loaddummy01]{TEXT}[/loaddummy01]' format_string = '<loaddummy>{TEXT}</loaddummy>'", "from precise_bbcode.tag_pool import tag_pool class LoadDummyTag(BBCodeTag): name = 'loaddummy01' definition_string = '[loaddummy01]{TEXT}[/loaddummy01]' format_string" ]
[ "rights reserved. # This file is released under the \"MIT License Agreement\". #", "is released under the \"MIT License Agreement\". # Please see the LICENSE file.", "<NAME>. # Github: https://github.com/litcoderr # All rights reserved. # This file is released", "released under the \"MIT License Agreement\". # Please see the LICENSE file. from", "# Copyright 2020 by <NAME>. # Github: https://github.com/litcoderr # All rights reserved. #", "file is released under the \"MIT License Agreement\". # Please see the LICENSE", "This file is released under the \"MIT License Agreement\". # Please see the", "the \"MIT License Agreement\". # Please see the LICENSE file. from .interface import", "Github: https://github.com/litcoderr # All rights reserved. # This file is released under the", "# All rights reserved. # This file is released under the \"MIT License", "# Github: https://github.com/litcoderr # All rights reserved. # This file is released under", "by <NAME>. # Github: https://github.com/litcoderr # All rights reserved. # This file is", "# This file is released under the \"MIT License Agreement\". # Please see", "All rights reserved. # This file is released under the \"MIT License Agreement\".", "under the \"MIT License Agreement\". # Please see the LICENSE file. from .interface", "reserved. # This file is released under the \"MIT License Agreement\". # Please", "2020 by <NAME>. # Github: https://github.com/litcoderr # All rights reserved. # This file", "\"MIT License Agreement\". # Please see the LICENSE file. from .interface import Serializable", "https://github.com/litcoderr # All rights reserved. # This file is released under the \"MIT", "Copyright 2020 by <NAME>. # Github: https://github.com/litcoderr # All rights reserved. # This" ]
[ "import os from AI import AI as ai board: TicTac def main(): mode", "typing import Any, List from Player import Player from TicTac import TicTac import", "1) print(\"{} Wins\".format(\"Nooone\" if c is None else c.name)) def _playTurn(board: TicTac, mode:", "p2 = Player('O', \"AI 2\") board = TicTac(mode, p1, p2) a = False", "1) def _askPlace() -> int: a = '' while not a.isdigit() or not", "from Player import Player from TicTac import TicTac import os from AI import", "curr = abs(curr - 1) print(\"{} Wins\".format(\"Nooone\" if c is None else c.name))", "players, curr) print(board) a, c = board.checkWin() curr = abs(curr - 1) print(\"{}", "board.checkWin() curr = abs(curr - 1) print(\"{} Wins\".format(\"Nooone\" if c is None else", "curr == 0: a = False while not a: a = board.place(players[0], _askPlace())", "1: a = input(prompt) return a def _clear() -> None: \"\"\" Clears the", "_askName(\"P1: \")) p2 = Player('O', _askName(\"P2: \")) else: p1 = Player('X', \"AI 1\")", "= board.checkWin() curr = abs(curr - 1) print(\"{} Wins\".format(\"Nooone\" if c is None", "if mode == 0: if curr == 0: a = False while not", "x + 1) elif mode == 1: a = False while not a:", "not a: a = board.place(players[0], _askPlace()) else: x, y = ai(board, players[1]) board.place(players[1],", "os from AI import AI as ai board: TicTac def main(): mode =", "TicTac def main(): mode = _selectMode() if mode == 0: p1 = Player('X',", "_playTurn(board, mode, players, curr) print(board) a, c = board.checkWin() curr = abs(curr -", "\"\"\" Tic Tac Toe Author: <NAME> Date: Oct. 
11, 2019 \"\"\" from __future__", "Player('X', _askName(\"P1: \")) p2 = Player('O', \"AI\") elif mode == 1: p1 =", "input(\"Spot #: \") return int(a) def _selectMode() -> int: a = '' while", "else: x, y = ai(board, players[curr]) board.place(players[curr], y * 3 + x +", "return int(a) def _selectMode() -> int: a = '' while not a.isnumeric() or", "not a: a = board.place(players[curr], _askPlace()) else: x, y = ai(board, players[curr]) board.place(players[curr],", "<reponame>Vrim/TicTacToe \"\"\" Tic Tac Toe Author: <NAME> Date: Oct. 11, 2019 \"\"\" from", "* 3 + x + 1) def _askPlace() -> int: a = ''", "Two-Player\\n\\ 2. AI vs AI\\n\") return int(a) def _askName(prompt: str) -> str: a", "List from Player import Player from TicTac import TicTac import os from AI", "mode == 1: a = False while not a: a = board.place(players[curr], _askPlace())", "< 1: a = input(prompt) return a def _clear() -> None: \"\"\" Clears", "c = board.checkWin() curr = abs(curr - 1) print(\"{} Wins\".format(\"Nooone\" if c is", "TicTac import TicTac import os from AI import AI as ai board: TicTac", "if c is None else c.name)) def _playTurn(board: TicTac, mode: int, players: List,", "ai(board, players[curr]) board.place(players[curr], y * 3 + x + 1) def _askPlace() ->", "str) -> str: a = '' while len(a) < 1: a = input(prompt)", "1: a = False while not a: a = board.place(players[curr], _askPlace()) else: x,", "int(a) in range(3): _clear() a = input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\ 2.", "x + 1) def _askPlace() -> int: a = '' while not a.isdigit()", "_clear() a = input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\ 2. AI vs AI\\n\")", "-> None: if mode == 0: if curr == 0: a = False", "Clears the console \"\"\" os.system('cls' if os.name == 'nt' else 'clear') if __name__", "= ai(board, players[curr]) board.place(players[curr], y * 3 + x + 1) def _askPlace()", "<NAME> Date: Oct. 
11, 2019 \"\"\" from __future__ import annotations from typing import", "annotations from typing import Any, List from Player import Player from TicTac import", "= Player('O', _askName(\"P2: \")) else: p1 = Player('X', \"AI 1\") p2 = Player('O',", "\"AI 1\") p2 = Player('O', \"AI 2\") board = TicTac(mode, p1, p2) a", "0 print(board) while not a: _playTurn(board, mode, players, curr) print(board) a, c =", "print(board) while not a: _playTurn(board, mode, players, curr) print(board) a, c = board.checkWin()", "else: p1 = Player('X', \"AI 1\") p2 = Player('O', \"AI 2\") board =", "print(board) a, c = board.checkWin() curr = abs(curr - 1) print(\"{} Wins\".format(\"Nooone\" if", "return int(a) def _askName(prompt: str) -> str: a = '' while len(a) <", "= False while not a: a = board.place(players[0], _askPlace()) else: x, y =", "int: a = '' while not a.isdigit() or not int(a) in range(1, 9", "'' while not a.isdigit() or not int(a) in range(1, 9 + 1): a", "c = '' players = [p1, p2] curr = 0 print(board) while not", "= '' while len(a) < 1: a = input(prompt) return a def _clear()", "from typing import Any, List from Player import Player from TicTac import TicTac", "[p1, p2] curr = 0 print(board) while not a: _playTurn(board, mode, players, curr)", "p2 = Player('O', _askName(\"P2: \")) else: p1 = Player('X', \"AI 1\") p2 =", "False c = '' players = [p1, p2] curr = 0 print(board) while", "_askPlace() -> int: a = '' while not a.isdigit() or not int(a) in", "or not int(a) in range(3): _clear() a = input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1.", "1. Two-Player\\n\\ 2. AI vs AI\\n\") return int(a) def _askName(prompt: str) -> str:", "#: \") return int(a) def _selectMode() -> int: a = '' while not", "None: if mode == 0: if curr == 0: a = False while", "= Player('X', \"AI 1\") p2 = Player('O', \"AI 2\") board = TicTac(mode, p1,", "Author: <NAME> Date: Oct. 
11, 2019 \"\"\" from __future__ import annotations from typing", "_clear() -> None: \"\"\" Clears the console \"\"\" os.system('cls' if os.name == 'nt'", "as ai board: TicTac def main(): mode = _selectMode() if mode == 0:", "None: \"\"\" Clears the console \"\"\" os.system('cls' if os.name == 'nt' else 'clear')", "x, y = ai(board, players[1]) board.place(players[1], y * 3 + x + 1)", "in range(1, 9 + 1): a = input(\"Spot #: \") return int(a) def", "c.name)) def _playTurn(board: TicTac, mode: int, players: List, curr: int) -> None: if", "not int(a) in range(3): _clear() a = input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\", "else: x, y = ai(board, players[1]) board.place(players[1], y * 3 + x +", "ai board: TicTac def main(): mode = _selectMode() if mode == 0: p1", "a.isnumeric() or not int(a) in range(3): _clear() a = input(\"Select gamemode:\\n\\ 0. One-Player\\n\\", "a = board.place(players[0], _askPlace()) else: x, y = ai(board, players[1]) board.place(players[1], y *", "\"\"\" from __future__ import annotations from typing import Any, List from Player import", "Wins\".format(\"Nooone\" if c is None else c.name)) def _playTurn(board: TicTac, mode: int, players:", "a: a = board.place(players[curr], _askPlace()) else: x, y = ai(board, players[curr]) board.place(players[curr], y", "board.place(players[0], _askPlace()) else: x, y = ai(board, players[1]) board.place(players[1], y * 3 +", "a = input(\"Spot #: \") return int(a) def _selectMode() -> int: a =", "players: List, curr: int) -> None: if mode == 0: if curr ==", "a = False while not a: a = board.place(players[curr], _askPlace()) else: x, y", "Player('O', _askName(\"P2: \")) else: p1 = Player('X', \"AI 1\") p2 = Player('O', \"AI", "+ 1) elif mode == 1: a = False while not a: a", "input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\ 2. AI vs AI\\n\") return int(a) def", "Tic Tac Toe Author: <NAME> Date: Oct. 
11, 2019 \"\"\" from __future__ import", "= Player('O', \"AI 2\") board = TicTac(mode, p1, p2) a = False c", "board: TicTac def main(): mode = _selectMode() if mode == 0: p1 =", "is None else c.name)) def _playTurn(board: TicTac, mode: int, players: List, curr: int)", "+ x + 1) elif mode == 1: a = False while not", "+ 1) def _askPlace() -> int: a = '' while not a.isdigit() or", "a: a = board.place(players[0], _askPlace()) else: x, y = ai(board, players[1]) board.place(players[1], y", "while not a.isdigit() or not int(a) in range(1, 9 + 1): a =", "def _askName(prompt: str) -> str: a = '' while len(a) < 1: a", "len(a) < 1: a = input(prompt) return a def _clear() -> None: \"\"\"", "a = '' while len(a) < 1: a = input(prompt) return a def", "_askName(\"P2: \")) else: p1 = Player('X', \"AI 1\") p2 = Player('O', \"AI 2\")", "_askName(\"P1: \")) p2 = Player('O', \"AI\") elif mode == 1: p1 = Player('X',", "def _playTurn(board: TicTac, mode: int, players: List, curr: int) -> None: if mode", "elif mode == 1: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', _askName(\"P2:", "else c.name)) def _playTurn(board: TicTac, mode: int, players: List, curr: int) -> None:", "while len(a) < 1: a = input(prompt) return a def _clear() -> None:", "== 1: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', _askName(\"P2: \")) else:", "Tac Toe Author: <NAME> Date: Oct. 
11, 2019 \"\"\" from __future__ import annotations", "Player('O', \"AI\") elif mode == 1: p1 = Player('X', _askName(\"P1: \")) p2 =", "-> None: \"\"\" Clears the console \"\"\" os.system('cls' if os.name == 'nt' else", "a.isdigit() or not int(a) in range(1, 9 + 1): a = input(\"Spot #:", "board.place(players[curr], y * 3 + x + 1) def _askPlace() -> int: a", "def main(): mode = _selectMode() if mode == 0: p1 = Player('X', _askName(\"P1:", "0: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', \"AI\") elif mode ==", "= ai(board, players[1]) board.place(players[1], y * 3 + x + 1) elif mode", "def _askPlace() -> int: a = '' while not a.isdigit() or not int(a)", "2. AI vs AI\\n\") return int(a) def _askName(prompt: str) -> str: a =", "False while not a: a = board.place(players[curr], _askPlace()) else: x, y = ai(board,", "3 + x + 1) def _askPlace() -> int: a = '' while", "p1 = Player('X', \"AI 1\") p2 = Player('O', \"AI 2\") board = TicTac(mode,", "p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', \"AI\") elif mode == 1:", "mode: int, players: List, curr: int) -> None: if mode == 0: if", "import TicTac import os from AI import AI as ai board: TicTac def", "input(prompt) return a def _clear() -> None: \"\"\" Clears the console \"\"\" os.system('cls'", "mode, players, curr) print(board) a, c = board.checkWin() curr = abs(curr - 1)", "-> int: a = '' while not a.isdigit() or not int(a) in range(1,", "import annotations from typing import Any, List from Player import Player from TicTac", "= input(prompt) return a def _clear() -> None: \"\"\" Clears the console \"\"\"", "int, players: List, curr: int) -> None: if mode == 0: if curr", "return a def _clear() -> None: \"\"\" Clears the console \"\"\" os.system('cls' if", "or not int(a) in range(1, 9 + 1): a = input(\"Spot #: \")", "range(1, 9 + 1): a = input(\"Spot #: \") return int(a) def _selectMode()", "1\") p2 = Player('O', \"AI 2\") board = TicTac(mode, p1, p2) a =", "from AI import AI as ai board: TicTac def main(): 
mode = _selectMode()", "= _selectMode() if mode == 0: p1 = Player('X', _askName(\"P1: \")) p2 =", "= 0 print(board) while not a: _playTurn(board, mode, players, curr) print(board) a, c", "- 1) print(\"{} Wins\".format(\"Nooone\" if c is None else c.name)) def _playTurn(board: TicTac,", "mode == 0: if curr == 0: a = False while not a:", "def _selectMode() -> int: a = '' while not a.isnumeric() or not int(a)", "False while not a: a = board.place(players[0], _askPlace()) else: x, y = ai(board,", "\")) else: p1 = Player('X', \"AI 1\") p2 = Player('O', \"AI 2\") board", "a = '' while not a.isdigit() or not int(a) in range(1, 9 +", "\")) p2 = Player('O', _askName(\"P2: \")) else: p1 = Player('X', \"AI 1\") p2", "curr: int) -> None: if mode == 0: if curr == 0: a", "'' while len(a) < 1: a = input(prompt) return a def _clear() ->", "= Player('X', _askName(\"P1: \")) p2 = Player('O', \"AI\") elif mode == 1: p1", "not a.isdigit() or not int(a) in range(1, 9 + 1): a = input(\"Spot", "from TicTac import TicTac import os from AI import AI as ai board:", "0: if curr == 0: a = False while not a: a =", "AI as ai board: TicTac def main(): mode = _selectMode() if mode ==", "Oct. 11, 2019 \"\"\" from __future__ import annotations from typing import Any, List", "= '' while not a.isdigit() or not int(a) in range(1, 9 + 1):", "One-Player\\n\\ 1. Two-Player\\n\\ 2. AI vs AI\\n\") return int(a) def _askName(prompt: str) ->", "the console \"\"\" os.system('cls' if os.name == 'nt' else 'clear') if __name__ ==", "Toe Author: <NAME> Date: Oct. 
11, 2019 \"\"\" from __future__ import annotations from", "Player from TicTac import TicTac import os from AI import AI as ai", "2019 \"\"\" from __future__ import annotations from typing import Any, List from Player", "not a: _playTurn(board, mode, players, curr) print(board) a, c = board.checkWin() curr =", "1: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', _askName(\"P2: \")) else: p1", "import Any, List from Player import Player from TicTac import TicTac import os", "\") return int(a) def _selectMode() -> int: a = '' while not a.isnumeric()", "== 0: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', \"AI\") elif mode", "from __future__ import annotations from typing import Any, List from Player import Player", "not a.isnumeric() or not int(a) in range(3): _clear() a = input(\"Select gamemode:\\n\\ 0.", "console \"\"\" os.system('cls' if os.name == 'nt' else 'clear') if __name__ == \"__main__\":", "while not a: a = board.place(players[0], _askPlace()) else: x, y = ai(board, players[1])", "= input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\ 2. 
AI vs AI\\n\") return int(a)", "11, 2019 \"\"\" from __future__ import annotations from typing import Any, List from", "= Player('X', _askName(\"P1: \")) p2 = Player('O', _askName(\"P2: \")) else: p1 = Player('X',", "_selectMode() -> int: a = '' while not a.isnumeric() or not int(a) in", "\"\"\" Clears the console \"\"\" os.system('cls' if os.name == 'nt' else 'clear') if", "_askPlace()) else: x, y = ai(board, players[1]) board.place(players[1], y * 3 + x", "while not a: a = board.place(players[curr], _askPlace()) else: x, y = ai(board, players[curr])", "== 1: a = False while not a: a = board.place(players[curr], _askPlace()) else:", "= False while not a: a = board.place(players[curr], _askPlace()) else: x, y =", "1): a = input(\"Spot #: \") return int(a) def _selectMode() -> int: a", "'' while not a.isnumeric() or not int(a) in range(3): _clear() a = input(\"Select", "in range(3): _clear() a = input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\ 2. AI", "int(a) def _askName(prompt: str) -> str: a = '' while len(a) < 1:", "players = [p1, p2] curr = 0 print(board) while not a: _playTurn(board, mode,", "'' players = [p1, p2] curr = 0 print(board) while not a: _playTurn(board,", "Player('O', \"AI 2\") board = TicTac(mode, p1, p2) a = False c =", "if mode == 0: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', \"AI\")", "curr = 0 print(board) while not a: _playTurn(board, mode, players, curr) print(board) a,", "print(\"{} Wins\".format(\"Nooone\" if c is None else c.name)) def _playTurn(board: TicTac, mode: int,", "board.place(players[curr], _askPlace()) else: x, y = ai(board, players[curr]) board.place(players[curr], y * 3 +", "_askName(prompt: str) -> str: a = '' while len(a) < 1: a =", "\"AI\") elif mode == 1: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O',", "board.place(players[1], y * 3 + x + 1) elif mode == 1: a", "str: a = '' while len(a) < 1: a = input(prompt) return a", "Player('X', \"AI 1\") p2 = Player('O', \"AI 2\") board = 
TicTac(mode, p1, p2)", "+ x + 1) def _askPlace() -> int: a = '' while not", "int(a) in range(1, 9 + 1): a = input(\"Spot #: \") return int(a)", "gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\ 2. AI vs AI\\n\") return int(a) def _askName(prompt:", "a, c = board.checkWin() curr = abs(curr - 1) print(\"{} Wins\".format(\"Nooone\" if c", "= abs(curr - 1) print(\"{} Wins\".format(\"Nooone\" if c is None else c.name)) def", "-> str: a = '' while len(a) < 1: a = input(prompt) return", "Any, List from Player import Player from TicTac import TicTac import os from", "import AI as ai board: TicTac def main(): mode = _selectMode() if mode", "curr) print(board) a, c = board.checkWin() curr = abs(curr - 1) print(\"{} Wins\".format(\"Nooone\"", "vs AI\\n\") return int(a) def _askName(prompt: str) -> str: a = '' while", "p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', _askName(\"P2: \")) else: p1 =", "a def _clear() -> None: \"\"\" Clears the console \"\"\" os.system('cls' if os.name", "List, curr: int) -> None: if mode == 0: if curr == 0:", "range(3): _clear() a = input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\ 2. 
AI vs", "p1, p2) a = False c = '' players = [p1, p2] curr", "= False c = '' players = [p1, p2] curr = 0 print(board)", "= '' while not a.isnumeric() or not int(a) in range(3): _clear() a =", "9 + 1): a = input(\"Spot #: \") return int(a) def _selectMode() ->", "abs(curr - 1) print(\"{} Wins\".format(\"Nooone\" if c is None else c.name)) def _playTurn(board:", "\"\"\" os.system('cls' if os.name == 'nt' else 'clear') if __name__ == \"__main__\": main()", "p2] curr = 0 print(board) while not a: _playTurn(board, mode, players, curr) print(board)", "== 0: if curr == 0: a = False while not a: a", "AI import AI as ai board: TicTac def main(): mode = _selectMode() if", "y = ai(board, players[curr]) board.place(players[curr], y * 3 + x + 1) def", "if curr == 0: a = False while not a: a = board.place(players[0],", "int(a) def _selectMode() -> int: a = '' while not a.isnumeric() or not", "= board.place(players[0], _askPlace()) else: x, y = ai(board, players[1]) board.place(players[1], y * 3", "TicTac import os from AI import AI as ai board: TicTac def main():", "main(): mode = _selectMode() if mode == 0: p1 = Player('X', _askName(\"P1: \"))", "_askPlace()) else: x, y = ai(board, players[curr]) board.place(players[curr], y * 3 + x", "c is None else c.name)) def _playTurn(board: TicTac, mode: int, players: List, curr:", "Player import Player from TicTac import TicTac import os from AI import AI", "import Player from TicTac import TicTac import os from AI import AI as", "None else c.name)) def _playTurn(board: TicTac, mode: int, players: List, curr: int) ->", "\"AI 2\") board = TicTac(mode, p1, p2) a = False c = ''", "= Player('O', \"AI\") elif mode == 1: p1 = Player('X', _askName(\"P1: \")) p2", "mode == 1: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', _askName(\"P2: \"))", "Date: Oct. 
11, 2019 \"\"\" from __future__ import annotations from typing import Any,", "= [p1, p2] curr = 0 print(board) while not a: _playTurn(board, mode, players,", "y = ai(board, players[1]) board.place(players[1], y * 3 + x + 1) elif", "2\") board = TicTac(mode, p1, p2) a = False c = '' players", "== 0: a = False while not a: a = board.place(players[0], _askPlace()) else:", "ai(board, players[1]) board.place(players[1], y * 3 + x + 1) elif mode ==", "players[1]) board.place(players[1], y * 3 + x + 1) elif mode == 1:", "* 3 + x + 1) elif mode == 1: a = False", "1) elif mode == 1: a = False while not a: a =", "a = input(prompt) return a def _clear() -> None: \"\"\" Clears the console", "TicTac(mode, p1, p2) a = False c = '' players = [p1, p2]", "elif mode == 1: a = False while not a: a = board.place(players[curr],", "while not a: _playTurn(board, mode, players, curr) print(board) a, c = board.checkWin() curr", "p2 = Player('O', \"AI\") elif mode == 1: p1 = Player('X', _askName(\"P1: \"))", "= TicTac(mode, p1, p2) a = False c = '' players = [p1,", "3 + x + 1) elif mode == 1: a = False while", "not int(a) in range(1, 9 + 1): a = input(\"Spot #: \") return", "_selectMode() if mode == 0: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O',", "0. One-Player\\n\\ 1. Two-Player\\n\\ 2. 
AI vs AI\\n\") return int(a) def _askName(prompt: str)", "while not a.isnumeric() or not int(a) in range(3): _clear() a = input(\"Select gamemode:\\n\\", "AI\\n\") return int(a) def _askName(prompt: str) -> str: a = '' while len(a)", "a = '' while not a.isnumeric() or not int(a) in range(3): _clear() a", "int) -> None: if mode == 0: if curr == 0: a =", "a = False while not a: a = board.place(players[0], _askPlace()) else: x, y", "x, y = ai(board, players[curr]) board.place(players[curr], y * 3 + x + 1)", "mode == 0: p1 = Player('X', _askName(\"P1: \")) p2 = Player('O', \"AI\") elif", "board = TicTac(mode, p1, p2) a = False c = '' players =", "int: a = '' while not a.isnumeric() or not int(a) in range(3): _clear()", "+ 1): a = input(\"Spot #: \") return int(a) def _selectMode() -> int:", "-> int: a = '' while not a.isnumeric() or not int(a) in range(3):", "= board.place(players[curr], _askPlace()) else: x, y = ai(board, players[curr]) board.place(players[curr], y * 3", "TicTac, mode: int, players: List, curr: int) -> None: if mode == 0:", "a = input(\"Select gamemode:\\n\\ 0. One-Player\\n\\ 1. Two-Player\\n\\ 2. 
AI vs AI\\n\") return", "__future__ import annotations from typing import Any, List from Player import Player from", "players[curr]) board.place(players[curr], y * 3 + x + 1) def _askPlace() -> int:", "Player('X', _askName(\"P1: \")) p2 = Player('O', _askName(\"P2: \")) else: p1 = Player('X', \"AI", "a = board.place(players[curr], _askPlace()) else: x, y = ai(board, players[curr]) board.place(players[curr], y *", "\")) p2 = Player('O', \"AI\") elif mode == 1: p1 = Player('X', _askName(\"P1:", "0: a = False while not a: a = board.place(players[0], _askPlace()) else: x,", "a: _playTurn(board, mode, players, curr) print(board) a, c = board.checkWin() curr = abs(curr", "_playTurn(board: TicTac, mode: int, players: List, curr: int) -> None: if mode ==", "= input(\"Spot #: \") return int(a) def _selectMode() -> int: a = ''", "AI vs AI\\n\") return int(a) def _askName(prompt: str) -> str: a = ''", "= '' players = [p1, p2] curr = 0 print(board) while not a:", "def _clear() -> None: \"\"\" Clears the console \"\"\" os.system('cls' if os.name ==", "mode = _selectMode() if mode == 0: p1 = Player('X', _askName(\"P1: \")) p2", "p2) a = False c = '' players = [p1, p2] curr =", "y * 3 + x + 1) elif mode == 1: a =", "a = False c = '' players = [p1, p2] curr = 0", "y * 3 + x + 1) def _askPlace() -> int: a =" ]
[ "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,", "def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\" def get_extract(self): from apps.log_extract.handlers.extract", "\" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255) link_id =", "get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\" def get_extract(self): from apps.log_extract.handlers.extract import", "download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"),", "0)) for ip in self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}:", "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE", "datetime import datetime from functools import reduce from typing import List from django.db", "task_process_info = models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark = models.TextField(_(\"备注\"), null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"),", "0)) for ip in self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\", 0)) for ip in self.ex_data.values())", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED", "= models.BooleanField(_(\"是否启用\"), default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True) class Meta: verbose_name", "null=True, blank=True) remark = models.TextField(_(\"备注\"), null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255)", "preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time =", "this software and associated documentation files (the \"Software\"), to deal in the Software", "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "# 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark", "_cal_total_time(self, components: List[dict]): return sum( [ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds", "ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return \" \".join(ret)", "default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True) class Meta: verbose_name = _(\"提取链路", "blank=True, 
help_text=_(\"内网链路不需要填写\") ) is_enable = models.BooleanField(_(\"是否启用\"), default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True,", "= models.TextField(_(\"备注\"), null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"),", "= models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"),", "import Q from django.utils.translation import ugettext_lazy as _ from apps.utils.log import logger from", "import ugettext_lazy as _ from apps.utils.log import logger from apps.models import ( OperateRecordModel,", "max_length=16) modules = JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"),", "= JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket =", "blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark = models.TextField(_(\"备注\"), null=True, blank=True) preview_directory =", "_(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id", "OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "MIT License. 
License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted, free of", "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "_ from apps.utils.log import logger from apps.models import ( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText,", "= models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data =", "the Software without restriction, including without limitation the rights to use, copy, modify,", "strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list", "person obtaining a copy of this software and associated documentation files (the \"Software\"),", "= models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark = models.TextField(_(\"备注\"), null=True,", "the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "PIPELINE_TIME_FORMAT) ).seconds for component in components if component[\"finish_time\"] is not None ] )", "= sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\", 0)) for ip", "models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range = models.CharField(_(\"预览日期\"), max_length=10,", "= [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return \" \".join(ret) download_file_detail.short_description", "\"\"\" import operator from datetime import datetime from functools import reduce from typing", ") is_enable = 
models.BooleanField(_(\"是否启用\"), default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True) class", "from functools import reduce from typing import List from django.db import models from", "logger from apps.models import ( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, ) from", "without restriction, including without limitation the rights to use, copy, modify, merge, publish,", "merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit", "default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type =", "[ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component in components if", "all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\", 0)) for", "null=True, blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True,", "根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark =", "models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True,", "blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True)", "JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, 
EncryptionField, ) from apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import", "ExtractLink.DoesNotExist: return \"\" def get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self):", "= models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark = models.TextField(_(\"备注\"), null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True,", "blank=True) # 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True)", "sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\",", ") total_elapsed.short_description = _(\"总耗时\") def ip_num(self): return len(self.ip_list) ip_num.short_description = _(\"IP数量\") def download_file_detail(self):", "in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED", "= models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator", "sublicense, and/or sell copies of the Software, and to permit persons to whom", "this permission notice shall be included in all copies or substantial portions of", "task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True)", "List[dict]): return sum( [ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component", "modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to", "ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "blank=True, db_index=True, null=True) class Meta: verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural", "[Q(**{f\"{field}__icontains\": keyword}) for field in self.search_fields] filter_q = reduce(operator.or_, filter_query) return self.filter(filter_q) return", "default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True)", "\"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\"))", "null=True, blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True,", "null=True, blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True) class Meta: ordering = 
[\"-created_at\"]", "import datetime from functools import reduce from typing import List from django.db import", "return self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects = TasksManager() task_id = models.AutoField(_(\"任务记录id\"),", "= models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64,", "WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\"))", "in self.search_fields] filter_q = reduce(operator.or_, filter_query) return self.filter(filter_q) return self class Tasks(OperateRecordModel): \"\"\"任务记录", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF", "max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\")", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A", "ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content", "= models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random =", "download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True)", "models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") 
) is_enable = models.BooleanField(_(\"是否启用\"), default=True) created_at =", "存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list = [] if \"activities\" not in self.pipeline_components_id: return", "models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark = models.TextField(_(\"备注\"), null=True, blank=True)", "component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]): return sum(", "blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region =", "notice and this permission notice shall be included in all copies or substantial", "max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key =", "def search(self, keyword): if keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword}) for field in self.search_fields]", "= models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id =", "null=True, blank=True) class Meta: ordering = [\"-created_at\"] def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except", "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE", "= models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\")) visible_dir", "in 
self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return", "charge, to any person obtaining a copy of this software and associated documentation", "blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True)", "= component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]): return", "models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta: ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\",", "KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"),", "help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField(", "models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True,", "License. 
License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted, free of charge,", "components if component[\"finish_time\"] is not None ] ) total_elapsed.short_description = _(\"总耗时\") def ip_num(self):", "sum(int(ip.get(\"file_count\", 0)) for ip in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in", "max_length=64, null=True, blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True,", "qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\",", "support the open source community by making BK-LOG 蓝鲸日志平台 available. Copyright (C) 2021", "help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) is_enable =", "task_status = task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list = []", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION", "JsonField(_(\"过滤内容\"), null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None)", "all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\",", "f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]): return sum( [ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"],", 
"primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\"))", "persons to whom the Software is furnished to do so, subject to the", "is pleased to support the open source community by making BK-LOG 蓝鲸日志平台 available.", "from django.db.models import Q from django.utils.translation import ugettext_lazy as _ from apps.utils.log import", "self.search_fields] filter_q = reduce(operator.or_, filter_query) return self.filter(filter_q) return self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\"", "Software is furnished to do so, subject to the following conditions: The above", "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "= models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果,", "max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name", "default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE) class Meta:", "= models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16,", "bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"),", "sum( [ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component in components", 
"PIPELINE_TIME_FORMAT from pipeline.service import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None)", "蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted, free of charge, to any person obtaining", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND", "blank=True) class Meta: ordering = [\"-created_at\"] def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist:", "blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range", "licensed under the MIT License. License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby", "= models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list =", "to deal in the Software without restriction, including without limitation the rights to", "primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type =", "blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True, blank=True)", "qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) is_enable = models.BooleanField(_(\"是否启用\"), default=True)", "_(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) is_enable = models.BooleanField(_(\"是否启用\"), default=True) created_at = 
models.DateTimeField(_(\"创建时间\"),", "= models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16)", "= _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), max_length=255, default=\"\")", "return \"0s\" for component_id, component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"]", "to whom the Software is furnished to do so, subject to the following", "documentation files (the \"Software\"), to deal in the Software without restriction, including without", "models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"),", "file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta: ordering = [\"-updated_at\"]", "查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path =", "files (the \"Software\"), to deal in the Software without restriction, including without limitation", "Software without restriction, including without limitation the rights to use, copy, modify, merge,", "models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True) class Meta: ordering =", "preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range =", "to do so, subject to the following conditions: The above copyright 
notice and", "models.TextField(_(\"备注\"), null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True,", "strategy_name = models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\"))", "cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random", "null=True) class Meta: verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路", ").seconds for component in components if component[\"finish_time\"] is not None ] ) total_elapsed.short_description", "f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return \" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\")", "models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator =", "in the Software without restriction, including without limitation the rights to use, copy,", "filter_query) return self.filter(filter_q) return self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects = TasksManager()", "= JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"),", "= models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE) class Meta: verbose_name =", "max_length=10, null=True, blank=True) preview_is_search_child = 
models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True,", "max_length=128, null=True, blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True,", "the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "to any person obtaining a copy of this software and associated documentation files", "= sum(int(ip.get(\"file_count\", 0)) for ip in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for ip", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN", "cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info", "蓝鲸日志平台 available. Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights", "for field in self.search_fields] filter_q = reduce(operator.or_, filter_query) return self.filter(filter_q) return self class", "blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id =", "class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"),", "\"\"\" objects = TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True) ip_list", "max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child", "company. All rights reserved. BK-LOG 蓝鲸日志平台 is licensed under the MIT License. 
License", "def _cal_total_time(self, components: List[dict]): return sum( [ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT)", "ordering = [\"-created_at\"] def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\" def", "qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"),", "调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务", "models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True)", "as _ from apps.utils.log import logger from apps.models import ( OperateRecordModel, SoftDeleteModel, JsonField,", "= models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"),", "null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况 #", "models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"),", "a copy of this software and associated documentation files (the \"Software\"), to deal", "Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "modules = JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64,", "= JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\")", "filter_q = reduce(operator.or_, filter_query) return self.filter(filter_q) return self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\"", "暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"),", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "null=True, blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True)", "blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求", "class ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\"))", "keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword}) for field in self.search_fields] filter_q = reduce(operator.or_, filter_query)", "def ip_num(self): return len(self.ip_list) ip_num.short_description = _(\"IP数量\") def download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0))", "return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]): return sum( [ ( 
datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) -", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF", "total_elapsed.short_description = _(\"总耗时\") def ip_num(self): return len(self.ip_list) ip_num.short_description = _(\"IP数量\") def download_file_detail(self): all_file_size", "import ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"),", "file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True,", "{all_file_num}\", ] return \" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"),", "models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type", "# 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path", "USE OR OTHER DEALINGS IN THE SOFTWARE. 
\"\"\" import operator from datetime import", "component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass", "\"activities\" not in self.pipeline_components_id: return \"0s\" for component_id, component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行", "0)) for ip in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in self.ex_data.values())", "free of charge, to any person obtaining a copy of this software and", "and this permission notice shall be included in all copies or substantial portions", "and to permit persons to whom the Software is furnished to do so,", "# 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id =", "{all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return \" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name", "Tencent is pleased to support the open source community by making BK-LOG 蓝鲸日志平台", "= _(\"IP数量\") def download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in self.ex_data.values()) all_file_num", "SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, ) from apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service", "except ExtractLink.DoesNotExist: return \"\" def get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def", "task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] 
component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]):", "search(self, keyword): if keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword}) for field in self.search_fields] filter_q", "blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True)", "rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "import ( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, ) from apps.log_extract.constants import ExtractLinkType,", "models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True) class Meta: verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas &&", "ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id) except Exception:", "models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"),", "return len(self.ip_list) ip_num.short_description = _(\"IP数量\") def download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for ip", "null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) #", "= [\"-created_at\"] def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\" def 
get_extract(self):", "JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"),", "django.utils.translation import ugettext_lazy as _ from apps.utils.log import logger from apps.models import (", "EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "reserved. BK-LOG 蓝鲸日志平台 is licensed under the MIT License. License for BK-LOG 蓝鲸日志平台:", "django.db.models import Q from django.utils.translation import ugettext_lazy as _ from apps.utils.log import logger", "models from django.db.models import Q from django.utils.translation import ugettext_lazy as _ from apps.utils.log", "ex_data = JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id =", "= EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\"))", "ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def search(self,", "for ip in self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\",", "associated documentation files (the \"Software\"), to deal in the Software without restriction, including", "link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE", "暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), 
max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip =", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,", "BK-LOG 蓝鲸日志平台 is licensed under the MIT License. License for BK-LOG 蓝鲸日志平台: --------------------------------------------------------------------", "blank=True) remark = models.TextField(_(\"备注\"), null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip", "notice shall be included in all copies or substantial portions of the Software.", "models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\")) visible_dir =", "\"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects = TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"),", "models.BooleanField(_(\"是否启用\"), default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True) class Meta: verbose_name =", "pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]): return sum( [ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT)", "verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), max_length=255,", "null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True)", "import logger from apps.models import ( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, )", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT", "under the MIT License. 
License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted,", "copy of this software and associated documentation files (the \"Software\"), to deal in", "datetime from functools import reduce from typing import List from django.db import models", "models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True,", "[ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return \" \".join(ret) download_file_detail.short_description =", "return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id))", "substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def search(self, keyword): if keyword: filter_query", "License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted, free of charge, to", "Meta: ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION", "= models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"),", "(C) 2021 THL A29 Limited, a Tencent company. All rights reserved. 
BK-LOG 蓝鲸日志平台", "models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE)", "OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import operator from", "obtaining a copy of this software and associated documentation files (the \"Software\"), to", "is not None ] ) total_elapsed.short_description = _(\"总耗时\") def ip_num(self): return len(self.ip_list) ip_num.short_description", "blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date", "models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\"))", "null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True,", "TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "available. Copyright (C) 2021 THL A29 Limited, a Tencent company. 
All rights reserved.", "op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"),", "= models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id", "ip in self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ]", "# coding=utf-8 \"\"\" Tencent is pleased to support the open source community by", "= models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink,", "OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id", "Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name =", "job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random", "= models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\"))", "= [] if \"activities\" not in self.pipeline_components_id: return \"0s\" for component_id, component_info in", "models.TextField(_(\"预览地址ip\"), null=True, blank=True) 
preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"), default=False,", "for component in components if component[\"finish_time\"] is not None ] ) total_elapsed.short_description =", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR", "models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"),", "OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path =", "expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True) pipeline_components_id =", "publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons", "import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status =", "pleased to support the open source community by making BK-LOG 蓝鲸日志平台 available. 
Copyright", "models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"),", "preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time =", "return \" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255) link_id", "including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,", "[\"-created_at\"] def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\" def get_extract(self): from", "if component[\"finish_time\"] is not None ] ) total_elapsed.short_description = _(\"总耗时\") def ip_num(self): return", "or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "from django.db import models from django.db.models import Q from django.utils.translation import ugettext_lazy as", "BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted, free of charge, to any person", "\"created_by\", \"remark\"] def search(self, keyword): if keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword}) for field", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\"\"\" import", "except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]): return sum( [ (", "in self.pipeline_components_id: return \"0s\" for component_id, component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"]", "blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) is_enable", "all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS", "Tencent company. All rights reserved. BK-LOG 蓝鲸日志平台 is licensed under the MIT License.", "SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "if keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword}) for field in self.search_fields] filter_q = reduce(operator.or_,", "cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True,", "ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True,", "len(self.ip_list) ip_num.short_description = _(\"IP数量\") def download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in", "def download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\",", "default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) is_enable = models.BooleanField(_(\"是否启用\"), default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True,", "= models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date = 
models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128,", "datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component in components if component[\"finish_time\"] is", "db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True)", "verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas &&", "models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True,", "class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects = TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id", "MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type =", "the open source community by making BK-LOG 蓝鲸日志平台 available. 
Copyright (C) 2021 THL", "reduce from typing import List from django.db import models from django.db.models import Q", "models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules", "OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "= MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type", "blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True)", "apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status", "用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id", "-------------------------------------------------------------------- Permission is hereby granted, free of charge, to any person obtaining a", "MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"),", "_(\"下载文件统计\") class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type =", "OR OTHER DEALINGS IN THE SOFTWARE. 
\"\"\" import operator from datetime import datetime", "class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name", "models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key", "= models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"),", "null=True, blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True,", "community by making BK-LOG 蓝鲸日志平台 available. Copyright (C) 2021 THL A29 Limited, a", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN", "primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"), db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type", "= models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random =", "THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import operator from datetime", "self.pipeline_components_id: return \"0s\" for component_id, component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] =", "by making BK-LOG 蓝鲸日志平台 available. 
Copyright (C) 2021 THL A29 Limited, a Tencent", "objects = TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True) ip_list =", "ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "# 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def", "Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list = [] if \"activities\" not in", "OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "(第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id =", "models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator =", "above copyright notice and this permission notice shall be included in all copies", "# 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True) #", "- datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component in components if component[\"finish_time\"] is not None", "WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket =", "EncryptionField, ) from apps.log_extract.constants import 
ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import task_service class Strategies(SoftDeleteModel):", "ip in self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\", 0)) for ip in self.ex_data.values()) all_pack_file_size =", "{all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return \" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class", "= _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\")", "component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]): return sum( [", "cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) #", "component in components if component[\"finish_time\"] is not None ] ) total_elapsed.short_description = _(\"总耗时\")", "] ) total_elapsed.short_description = _(\"总耗时\") def ip_num(self): return len(self.ip_list) ip_num.short_description = _(\"IP数量\") def", "( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, ) from apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS", "permission notice shall be included in all copies or substantial portions of the", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS", "List from django.db import models from django.db.models import Q from django.utils.translation import ugettext_lazy", "IN THE SOFTWARE. 
\"\"\" import operator from datetime import datetime from functools import", "= models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) #", "blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True) class", "= models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info =", "return \"0s\" component_status_list = [] if \"activities\" not in self.pipeline_components_id: return \"0s\" for", "bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE) class Meta: verbose_name", "这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self,", "models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\",", "cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"),", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH", "preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data", "sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in self.ex_data.values()) 
all_file_num = sum(int(ip.get(\"file_count\", 0)) for ip in", "Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects = TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id =", "default=\"\") class Meta: ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\", \"created_by\",", "blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True)", "models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id =", "the following conditions: The above copyright notice and this permission notice shall be", "total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list", "created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True) class Meta: verbose_name = _(\"提取链路 (第一次配置链路之后", "ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return", "task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list = [] if \"activities\"", "PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component in components if component[\"finish_time\"] is not", 
"models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True,", "return \"\" def get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return", "return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id) except", "self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\", 0)) for ip in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0))", "import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id = models.IntegerField(_(\"业务ID\"),", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT", "Limited, a Tencent company. All rights reserved. BK-LOG 蓝鲸日志平台 is licensed under the", "furnished to do so, subject to the following conditions: The above copyright notice", "target_dir = models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link =", "= reduce(operator.or_, filter_query) return self.filter(filter_q) return self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects", "permit persons to whom the Software is furnished to do so, subject to", "to support the open source community by making BK-LOG 蓝鲸日志平台 available. 
Copyright (C)", "# 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API", "operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta: ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields", "any person obtaining a copy of this software and associated documentation files (the", "models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64,", "BK-LOG 蓝鲸日志平台 available. Copyright (C) 2021 THL A29 Limited, a Tencent company. All", "= EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True,", "max_length=16, null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True,", "ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link", "blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True) class Meta: ordering = [\"-created_at\"] def", "from django.utils.translation import ugettext_lazy as _ from apps.utils.log import logger from apps.models import", "= _(\"总耗时\") def ip_num(self): return len(self.ip_list) ip_num.short_description = _(\"IP数量\") def download_file_detail(self): all_file_size =", "copies of the Software, and to permit persons to whom the Software is", "from apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id", 
"ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\" def get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())()", "蓝鲸日志平台 is licensed under the MIT License. License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission", "def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id) except Exception: #", "\"file_path\", \"created_by\", \"remark\"] def search(self, keyword): if keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword}) for", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", "执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"),", "source community by making BK-LOG 蓝鲸日志平台 available. Copyright (C) 2021 THL A29 Limited,", "included in all copies or substantial portions of the Software. 
THE SOFTWARE IS", "= _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type", "# 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list = [] if \"activities\" not in self.pipeline_components_id:", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and", "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "Meta: verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas", "Meta: ordering = [\"-created_at\"] def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\"", "DEALINGS IN THE SOFTWARE. \"\"\" import operator from datetime import datetime from functools", "blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径", "import models from django.db.models import Q from django.utils.translation import ugettext_lazy as _ from", "db_index=True, null=True) class Meta: verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural =", "the Software, and to permit persons to whom the Software is furnished to", "making BK-LOG 蓝鲸日志平台 available. 
Copyright (C) 2021 THL A29 Limited, a Tencent company.", "apps.models import ( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, ) from apps.log_extract.constants import", "OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, ) from apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT from", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN", "( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component in components if component[\"finish_time\"]", "cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None,", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,", "functools import reduce from typing import List from django.db import models from django.db.models", "following conditions: The above copyright notice and this permission notice shall be included", "max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"),", "def get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def", "SOFTWARE. \"\"\" import operator from datetime import datetime from functools import reduce from", "field in self.search_fields] filter_q = reduce(operator.or_, filter_query) return self.filter(filter_q) return self class Tasks(OperateRecordModel):", "copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED \"AS IS\",", "models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"),", "NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True)", "ip in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in self.ex_data.values()) ret =", "The above copyright notice and this permission notice shall be included in all", "from pipeline.service import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id", "logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list = [] if \"activities\" not in self.pipeline_components_id: return \"0s\"", "null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region", "class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def search(self, keyword): if keyword:", "all_file_num = sum(int(ip.get(\"file_count\", 0)) for ip in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for", "pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id", "class Meta: ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\", \"created_by\", \"remark\"]", "&& 暂时只支持linux及安装了cgwin的系统)\") class 
ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip", "self.filter(filter_q) return self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects = TasksManager() task_id =", "the MIT License. License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted, free", "is licensed under the MIT License. License for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is", "= models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\"))", "get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行", "= JsonField(_(\"过滤内容\"), null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"),", "] return \" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255)", "try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components:", "f\"{_('下载文件总数量')}: {all_file_num}\", ] return \" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name =", "return self.filter(filter_q) return self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects = 
TasksManager() task_id", "\"Software\"), to deal in the Software without restriction, including without limitation the rights", "blank=True) preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time", "deal in the Software without restriction, including without limitation the rights to use,", "granted, free of charge, to any person obtaining a copy of this software", "MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True) download_status", "limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "components: List[dict]): return sum( [ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for", "EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\")", "help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255,", "preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child =", "JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True,", "AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "null=True, blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True)", "download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\", 0))", "_(\"IP数量\") def download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in self.ex_data.values()) all_file_num =", "ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "of this software and associated documentation files (the \"Software\"), to deal in the", "云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"),", "db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO", "\"remark\"] def search(self, keyword): if keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword}) for field in", "一个\"下载\"行为记作一个\"Task\" \"\"\" objects = TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True)", "null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id", "null=True, blank=True) preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20)", "max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True) 
class Meta: ordering = [\"-created_at\"] def get_link_type(self):", "sell copies of the Software, and to permit persons to whom the Software", "def total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\"", "A29 Limited, a Tencent company. All rights reserved. BK-LOG 蓝鲸日志平台 is licensed under", "= models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"),", "models.IntegerField(_(\"链路id\"), null=True, blank=True) class Meta: ordering = [\"-created_at\"] def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type", "from datetime import datetime from functools import reduce from typing import List from", "ip_num.short_description = _(\"IP数量\") def download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for ip in self.ex_data.values())", "component_id, component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError:", "auto_now_add=True, blank=True, db_index=True, null=True) class Meta: verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\")", "do so, subject to the following conditions: The above copyright notice and this", "in self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\", 0)) for ip in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\",", "import reduce from typing import List from django.db import models from django.db.models import", "TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = 
models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import operator", "\"0s\" for component_id, component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id])", "\"\"\" Tencent is pleased to support the open source community by making BK-LOG", "max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE) class", "OTHER DEALINGS IN THE SOFTWARE. \"\"\" import operator from datetime import datetime from", "except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list = [] if \"activities\" not", "null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True,", "self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}:", "is furnished to do so, subject to the following conditions: The above copyright", "All rights reserved. BK-LOG 蓝鲸日志平台 is licensed under the MIT License. 
License for", "MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, ) from apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import task_service", "bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"),", "reduce(operator.or_, filter_query) return self.filter(filter_q) return self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects =", "KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\" def _cal_total_time(self, components: List[dict]): return sum( [ ( datetime.strptime(component[\"finish_time\"],", "= [\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def search(self, keyword): if keyword: filter_query = [Q(**{f\"{field}__icontains\":", "so, subject to the following conditions: The above copyright notice and this permission", "_(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True,", "= models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255,", "apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id =", "help_text=_(\"内网链路不需要填写\") ) is_enable = models.BooleanField(_(\"是否启用\"), default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True)", "get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() 
def total_elapsed(self):", "THL A29 Limited, a Tencent company. All rights reserved. BK-LOG 蓝鲸日志平台 is licensed", "= models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE) class Meta: verbose_name = _(\"链路中转机\") verbose_name_plural =", "typing import List from django.db import models from django.db.models import Q from django.utils.translation", "blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True) #", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR", "THE SOFTWARE. \"\"\" import operator from datetime import datetime from functools import reduce", "of the Software, and to permit persons to whom the Software is furnished", "ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20,", "and/or sell copies of the Software, and to permit persons to whom the", "ip = models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE) class Meta: verbose_name = _(\"链路中转机\") verbose_name_plural", "= models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id =", "null=True, blank=True) # 用于查询中转服务器到云石的上传情况 # 云石上待下载的文件路径 cstone_file_path = models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True)", "models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True,", "keyword}) for field in self.search_fields] filter_q = 
reduce(operator.or_, filter_query) return self.filter(filter_q) return self", "self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return \"", "of charge, to any person obtaining a copy of this software and associated", "(the \"Software\"), to deal in the Software without restriction, including without limitation the", "f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return \" \".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel):", "from apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try:", "name = models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value)", "pipeline.service import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\" strategy_id = models.AutoField(_(\"策略ID\"), primary_key=True, default=None) bk_biz_id =", "blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True, blank=True, db_index=True) pipeline_components_id", "preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name =", ") qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) is_enable = models.BooleanField(_(\"是否启用\"),", "copyright notice and this permission notice shall be included in all copies or", "= models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), 
default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\",", "&& 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir =", "= [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def search(self, keyword):", "cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True) class Meta:", "ip_num(self): return len(self.ip_list) ip_num.short_description = _(\"IP数量\") def download_file_detail(self): all_file_size = sum(int(ip.get(\"all_origin_file_size\", 0)) for", "to permit persons to whom the Software is furnished to do so, subject", "null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark = models.TextField(_(\"备注\"), null=True, blank=True) preview_directory", "default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True,", "max_length=64, default=\"\") class Meta: ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\",", "blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id =", "= models.IntegerField(_(\"链路id\"), null=True, blank=True) class Meta: ordering = [\"-created_at\"] def get_link_type(self): try: return", "in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in self.ex_data.values()) ret = [", "JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = 
MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class", "= models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True)", "= models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range = models.CharField(_(\"预览日期\"),", "= models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) is_enable = models.BooleanField(_(\"是否启用\"), default=True) created_at", "需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir", "conditions: The above copyright notice and this permission notice shall be included in", "null=True, blank=True, max_length=255) preview_ip = models.TextField(_(\"预览地址ip\"), null=True, blank=True) preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True,", "THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO", "null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True,", "default=None, max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id =", "= models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\")) visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator", "qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True,", "apps.utils.log import logger from apps.models import ( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField,", "models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True) # 用于查询中转服务器到云石的上传情况", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER", "Permission is hereby granted, free of charge, to any person obtaining a copy", "= [Q(**{f\"{field}__icontains\": keyword}) for field in self.search_fields] filter_q = reduce(operator.or_, filter_query) return self.filter(filter_q)", "# 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket", "filter_query = [Q(**{f\"{field}__icontains\": keyword}) for field in self.search_fields] filter_q = reduce(operator.or_, filter_query) return", "be included in all copies or substantial portions of the Software. 
THE SOFTWARE", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "db_index=True) strategy_name = models.TextField(_(\"策略名称\")) user_list = MultiStrSplitByCommaFieldText(_(\"用户ID\")) select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules =", "max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id", "Q from django.utils.translation import ugettext_lazy as _ from apps.utils.log import logger from apps.models", "ugettext_lazy as _ from apps.utils.log import logger from apps.models import ( OperateRecordModel, SoftDeleteModel,", "link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255)", "2021 THL A29 Limited, a Tencent company. All rights reserved. BK-LOG 蓝鲸日志平台 is", "import operator from datetime import datetime from functools import reduce from typing import", "from typing import List from django.db import models from django.db.models import Q from", "whom the Software is furnished to do so, subject to the following conditions:", "blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255)", "component[\"finish_time\"] is not None ] ) total_elapsed.short_description = _(\"总耗时\") def ip_num(self): return len(self.ip_list)", "keyword): if keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword}) for field in self.search_fields] filter_q =", "models.IntegerField(_(\"主机云区域id\")) ip = models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE) class Meta: verbose_name = _(\"链路中转机\")", "models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child = 
models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True,", "class Meta: verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后", "return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\" def get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory return", "django.db import models from django.db.models import Q from django.utils.translation import ugettext_lazy as _", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\"", "Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. BK-LOG", "[\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields = [\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def search(self, keyword): if", "for component_id, component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except", "FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id = models.CharField(_(\"流水线ID\"), max_length=128, null=True,", "self class Tasks(OperateRecordModel): \"\"\"任务记录 一个\"下载\"行为记作一个\"Task\" \"\"\" objects = TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True)", "component_status_list = [] if \"activities\" not in self.pipeline_components_id: return \"0s\" for component_id, component_info", "default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket =", "visible_dir = 
MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta:", "blank=True) preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True) preview_start_time", "not None ] ) total_elapsed.short_description = _(\"总耗时\") def ip_num(self): return len(self.ip_list) ip_num.short_description =", "null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date = models.DateTimeField(_(\"任务过期时间\"), default=None) pipeline_id", "from apps.models import ( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager, EncryptionField, ) from apps.log_extract.constants", "portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "class Meta: ordering = [\"-created_at\"] def get_link_type(self): try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return", "= models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True) class Meta: verbose_name = _(\"提取链路 (第一次配置链路之后 需要重新部署saas", "if \"activities\" not in self.pipeline_components_id: return \"0s\" for component_id, component_info in self.pipeline_components_id[\"activities\"].items(): #", "ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first() def total_elapsed(self): try: task_status = task_service.get_state(self.pipeline_id)", "DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True, 
blank=True)", "OperateRecordModelManager, EncryptionField, ) from apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import task_service class", "distribute, sublicense, and/or sell copies of the Software, and to permit persons to", "of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "software and associated documentation files (the \"Software\"), to deal in the Software without", "blank=True, db_index=True) pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id = models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) #", "= models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta: ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager): search_fields =", "\".join(ret) download_file_detail.short_description = _(\"下载文件统计\") class ExtractLink(OperateRecordModel): name = models.CharField(_(\"链路名称\"), max_length=255) link_id = models.AutoField(_(\"链路id\"),", "from apps.utils.log import logger from apps.models import ( OperateRecordModel, SoftDeleteModel, JsonField, MultiStrSplitByCommaFieldText, OperateRecordModelManager,", "filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True) expiration_date =", "= models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20) ex_data = JsonField(_(\"额外数据\"), null=True, blank=True) cos_file_name = models.CharField(_(\"cos对象文件名称\"),", "coding=utf-8 \"\"\" Tencent is pleased to support the open source community by making", "cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True, blank=True) # 创建中转服务器到云石的上传任务 job_upload_task_id", "= models.CharField(_(\"cos对象文件名称\"), null=True, blank=True, max_length=255) link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True) class Meta: ordering", "shall be 
included in all copies or substantial portions of the Software. THE", "default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") )", "null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True) download_status = models.CharField(_(\"当前文件下载状态\"), max_length=64, null=True, blank=True)", "NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "= MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta: ordering = [\"-updated_at\"] class", "MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta: ordering = [\"-updated_at\"] class TasksManager(OperateRecordModelManager):", "for ip in self.ex_data.values()) all_file_num = sum(int(ip.get(\"file_count\", 0)) for ip in self.ex_data.values()) all_pack_file_size", "open source community by making BK-LOG 蓝鲸日志平台 available. 
Copyright (C) 2021 THL A29", "cstone_download_random = models.TextField(_(\"下载随机值\"), null=True, blank=True) task_process_info = models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark = models.TextField(_(\"备注\"),", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT", "创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True,", "= MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta: ordering", "try: task_status = task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list =", "models.CharField(_(\"云石文件路径\"), default=None, max_length=64, null=True, blank=True) # 等到上传完毕后,调创建下载链接的API cstone_download_task_id = models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id", "in components if component[\"finish_time\"] is not None ] ) total_elapsed.short_description = _(\"总耗时\") def", "the Software is furnished to do so, subject to the following conditions: The", "link_id = models.IntegerField(_(\"链路id\"), null=True, blank=True) class Meta: ordering = [\"-created_at\"] def get_link_type(self): try:", "blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField( _(\"腾讯云Cos桶名称\"),", "select_type = models.CharField(_(\"目标选择类型\"), max_length=16) modules = JsonField(_(\"模块列表\")) 
visible_dir = MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\"))", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING", "= TasksManager() task_id = models.AutoField(_(\"任务记录id\"), primary_key=True) bk_biz_id = models.IntegerField(_(\"业务id\"), db_index=True) ip_list = MultiStrSplitByCommaFieldText(_(\"业务机器ip\"))", "# 创建中转服务器到云石的上传任务 job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"),", "in self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}: {all_file_size}kb\", f\"{_('下载文件总数量')}: {all_file_num}\", ] return", "is_enable = models.BooleanField(_(\"是否启用\"), default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True, blank=True, db_index=True, null=True) class Meta:", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT", "a Tencent company. All rights reserved. BK-LOG 蓝鲸日志平台 is licensed under the MIT", "operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True,", "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS", "subject to the following conditions: The above copyright notice and this permission notice", "PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE", "= MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True)", "max_length=255) link_id = models.AutoField(_(\"链路id\"), primary_key=True) link_type = models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"),", "models.GenericIPAddressField(_(\"主机ip\")) link = models.ForeignKey(ExtractLink, on_delete=models.CASCADE) class Meta: verbose_name = _(\"链路中转机\") verbose_name_plural = _(\"链路中转机\")", "job_upload_task_id = models.BigIntegerField(_(\"任务上传ID\"), null=True, blank=True) # 查询上传脚本的执行结果, 执行结果里有云石返回的task_id cstone_upload_task_id = models.BigIntegerField(_(\"云石上传ID\"), null=True, blank=True)", "models.TextField(_(\"任务过程信息\"), null=True, blank=True) remark = models.TextField(_(\"备注\"), null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True,", "remark = models.TextField(_(\"备注\"), null=True, blank=True) preview_directory = models.CharField(_(\"预览目录\"), null=True, blank=True, max_length=255) preview_ip =", "try: return ExtractLink.objects.get(link_id=self.link_id).link_type except ExtractLink.DoesNotExist: return \"\" def get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory", "需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model): target_dir = models.CharField(_(\"挂载目录\"), max_length=255, default=\"\") bk_cloud_id = models.IntegerField(_(\"主机云区域id\"))", "for ip in self.ex_data.values()) all_pack_file_size = sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in self.ex_data.values()) ret", "(第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class ExtractLinkHost(models.Model):", "is hereby granted, free of charge, to any person obtaining a copy of", 
"and associated documentation files (the \"Software\"), to deal in the Software without restriction,", "FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or", "null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket = models.BigIntegerField(_(\"上传票据\"), null=True, blank=True) cstone_upload_random = models.TextField(_(\"上传随机值\"), null=True,", "= models.BigIntegerField(_(\"云石任务ID\"), null=True, blank=True) cstone_download_bk_biz_id = models.BigIntegerField(_(\"云石下载业务ID\"), null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True,", "operator from datetime import datetime from functools import reduce from typing import List", "_(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") verbose_name_plural = _(\"提取链路 (第一次配置链路之后 需要重新部署saas && 暂时只支持linux及安装了cgwin的系统)\") class", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) is_enable = models.BooleanField(_(\"是否启用\"), default=True) created_at = models.DateTimeField(_(\"创建时间\"), auto_now_add=True,", "for BK-LOG 蓝鲸日志平台: -------------------------------------------------------------------- Permission is hereby granted, free of charge, to any", "self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try: task_status[\"children\"][component_id][\"name\"] = component_info[\"name\"] component_status_list.append(task_status[\"children\"][component_id]) except KeyError: pass return f\"{self._cal_total_time(component_status_list)}s\"", "\"\" def get_extract(self): from apps.log_extract.handlers.extract import ExtractLinkFactory return ExtractLinkFactory.get_link(self.get_link_type())() def get_link(self): return ExtractLink.objects.filter(link_id=self.link_id).first()", "pipeline_components_id = JsonField(_(\"流水线组件ID\"), null=True, blank=True) job_task_id = 
models.BigIntegerField(_(\"文件分发ID\"), null=True, blank=True) # 调创建上传任务的API cstone_upload_ticket", "None ] ) total_elapsed.short_description = _(\"总耗时\") def ip_num(self): return len(self.ip_list) ip_num.short_description = _(\"IP数量\")", "datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component in components if component[\"finish_time\"] is not None ]", "hereby granted, free of charge, to any person obtaining a copy of this", "return sum( [ ( datetime.strptime(component[\"finish_time\"], PIPELINE_TIME_FORMAT) - datetime.strptime(component[\"start_time\"], PIPELINE_TIME_FORMAT) ).seconds for component in", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE", "= models.CharField(_(\"链路类型\"), max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id =", "blank=True) preview_start_time = models.CharField(_(\"预览开始日期\"), null=True, blank=True, max_length=20) preview_end_time = models.CharField(_(\"预览结束日期\"), null=True, blank=True, max_length=20)", "not in self.pipeline_components_id: return \"0s\" for component_id, component_info in self.pipeline_components_id[\"activities\"].items(): # 这里有可能有些pipeline组件并未执行 try:", "restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute,", "OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR", "search_fields = [\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def search(self, keyword): if keyword: filter_query =", "rights reserved. BK-LOG 蓝鲸日志平台 is licensed under the MIT License. 
License for BK-LOG", "\"0s\" component_status_list = [] if \"activities\" not in self.pipeline_components_id: return \"0s\" for component_id,", "to the following conditions: The above copyright notice and this permission notice shall", "null=True, blank=True) preview_time_range = models.CharField(_(\"预览日期\"), max_length=10, null=True, blank=True) preview_is_search_child = models.BooleanField(_(\"预览是否搜索子目录\"), default=False, blank=True)", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "Software, and to permit persons to whom the Software is furnished to do", "null=True, blank=True) cstone_download_ticket = models.BigIntegerField(_(\"下载票据\"), null=True, blank=True) # 根据票据向云石网盘发起下载请求 cstone_download_random = models.TextField(_(\"下载随机值\"), null=True,", "null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket = models.CharField(", "EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_secret_key = EncryptionField(_(\"腾讯云SecretKey\"), default=\"\", null=True, blank=True, help_text=_(\"内网链路不需要填写\")) qcloud_cos_bucket", "max_length=20, default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\",", "default=ExtractLinkType.COMMON.value) operator = models.CharField(_(\"执行人\"), max_length=255) op_bk_biz_id = models.IntegerField(_(\"执行bk_biz_id\")) qcloud_secret_id = EncryptionField(_(\"腾讯云SecretId\"), default=\"\", null=True,", "import List from django.db import models from django.db.models import Q from django.utils.translation import", "_(\"总耗时\") def ip_num(self): return len(self.ip_list) ip_num.short_description = _(\"IP数量\") def download_file_detail(self): all_file_size = 
sum(int(ip.get(\"all_origin_file_size\",", "[\"ip_list\", \"file_path\", \"created_by\", \"remark\"] def search(self, keyword): if keyword: filter_query = [Q(**{f\"{field}__icontains\": keyword})", "[] if \"activities\" not in self.pipeline_components_id: return \"0s\" for component_id, component_info in self.pipeline_components_id[\"activities\"].items():", "default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") ) qcloud_cos_region = models.CharField( _(\"腾讯云Cos区域\"), max_length=255, default=\"\", blank=True, help_text=_(\"内网链路不需要填写\") )", "= task_service.get_state(self.pipeline_id) except Exception: # 存在多主机,单主机日志下载的情况,因此有可能有些pipeline节点未执行 logger.info(\"pipeline任务不存在,pipeline_id=>[{}]\".format(self.pipeline_id)) return \"0s\" component_status_list = [] if", "= sum(int(ip.get(\"all_pack_file_size\", 0)) for ip in self.ex_data.values()) ret = [ f\"{_('下载文件总大小')}: {all_pack_file_size}kb\", f\"{_('下载原始文件原始总大小')}:", ") from apps.log_extract.constants import ExtractLinkType, PIPELINE_TIME_FORMAT from pipeline.service import task_service class Strategies(SoftDeleteModel): \"\"\"用户策略表\"\"\"", "= MultiStrSplitByCommaFieldText(_(\"业务机器ip\")) file_path = MultiStrSplitByCommaFieldText(_(\"文件列表\")) filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content =", "filter_type = models.CharField(_(\"过滤类型\"), max_length=16, null=True, blank=True) filter_content = JsonField(_(\"过滤内容\"), null=True, blank=True) download_status =", "MultiStrSplitByCommaFieldText(_(\"目录列表\")) file_type = MultiStrSplitByCommaFieldText(_(\"文件类型\")) operator = models.CharField(_(\"作业执行人\"), max_length=64, default=\"\") class Meta: ordering =" ]
[ "Sort implementation.\"\"\" length = len(lst) counter = 0 lst_copy = lst for i", "list: \"\"\"Returns two lists merged into one.\"\"\" counter = 0 new_list = []", "list) -> list: \"\"\"Returns two lists merged into one.\"\"\" counter = 0 new_list", "+= 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter) def merge_sort(lst: list) -> list: \"\"\"Merge", "len(result)-1: lst1 = result[i] lst2 = result[i+1] new_list, num = (merge(lst1, lst2)) counter", "length): curr_element = lst_copy[i] idx = i-1 if not (lst_copy[idx] > curr_element and", "+= 2 if len(result) != 0: lst_copy[:] = result[-1][:] return (lst_copy, counter) def", "curr_element and idx >= 0): counter += 1 else: while lst_copy[idx] > curr_element", "implementation.\"\"\" counter = 0 lst_copy = lst length = len(lst_copy) interval = length", "lst_copy[j-interval], lst_copy[j] j -= interval else: break lst_copy[j] = temp interval = interval", "0: counter += 1 lst_copy[idx+1] = lst_copy[idx] idx -= 1 if idx >=", "while j >= interval and lst_copy[j-interval] > temp: if lst_copy[j-interval] > temp: counter", "2 if len(result) != 0: lst_copy[:] = result[-1][:] return (lst_copy, counter) def selection_sort(lst:", "0 lst_copy = lst for i in range(length - 1): min_index = i", ">= interval and lst_copy[j-interval] > temp: if lst_copy[j-interval] > temp: counter += 1", "2 while interval > 0: for i in range(interval, length): temp = lst_copy[i]", "+= 1 else: new_list.append(lst2[two]) two += 1 counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return", "\"\"\"Insertion Sort implementation.\"\"\" length = len(lst) lst_copy, counter = lst, 0 for i", "= lst for i in range(length - 1): min_index = i for j", "j -= interval else: break lst_copy[j] = temp interval = interval // 2", "and idx >= 0: counter += 1 lst_copy[idx+1] = lst_copy[idx] idx -= 1", "in range(len(lst_copy)): result.append([lst_copy[i]]) i = 0 while i < len(result)-1: lst1 = result[i]", "lst_copy[i] j = i 
counter += 1 while j >= interval and lst_copy[j-interval]", "def merge(lst1: list, lst2: list) -> list: \"\"\"Returns two lists merged into one.\"\"\"", "1 else: while lst_copy[idx] > curr_element and idx >= 0: counter += 1", "1 if idx >= 0: counter += 1 lst_copy[idx+1] = curr_element return counter", "one, two = 0, 0 while one != len(lst1) and two != len(lst2):", "0 lst_copy = lst length = len(lst_copy) interval = length // 2 while", "// 2 while interval > 0: for i in range(interval, length): temp =", "1, length): counter += 1 if lst_copy[min_index] > lst_copy[j]: min_index = j lst_copy[i],", "> curr_element and idx >= 0: counter += 1 lst_copy[idx+1] = lst_copy[idx] idx", "counter += num result.append(new_list) i += 2 if len(result) != 0: lst_copy[:] =", "i counter += 1 while j >= interval and lst_copy[j-interval] > temp: if", "in range(1, length): curr_element = lst_copy[i] idx = i-1 if not (lst_copy[idx] >", "one += 1 else: new_list.append(lst2[two]) two += 1 counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:])", "0: for i in range(interval, length): temp = lst_copy[i] j = i counter", "-= 1 if idx >= 0: counter += 1 lst_copy[idx+1] = curr_element return", "= result[i] lst2 = result[i+1] new_list, num = (merge(lst1, lst2)) counter += num", "len(result) != 0: lst_copy[:] = result[-1][:] return (lst_copy, counter) def selection_sort(lst: list) ->", "+= 1 if lst_copy[min_index] > lst_copy[j]: min_index = j lst_copy[i], lst_copy[min_index] = lst_copy[min_index],", "(lst_copy[idx] > curr_element and idx >= 0): counter += 1 else: while lst_copy[idx]", "1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter) def merge_sort(lst: list) -> list: \"\"\"Merge Sort", "len(lst) lst_copy, counter = lst, 0 for i in range(1, length): curr_element =", "and idx >= 0): counter += 1 else: while lst_copy[idx] > curr_element and", "1 while j >= interval and lst_copy[j-interval] > temp: if lst_copy[j-interval] > temp:", "while i < 
len(result)-1: lst1 = result[i] lst2 = result[i+1] new_list, num =", "two = 0, 0 while one != len(lst1) and two != len(lst2): if", "new_list.append(lst1[one]) one += 1 else: new_list.append(lst2[two]) two += 1 counter += 1 new_list.extend(lst1[one:])", "0: lst_copy[:] = result[-1][:] return (lst_copy, counter) def selection_sort(lst: list) -> list: \"\"\"Selection", "length = len(lst) counter = 0 lst_copy = lst for i in range(length", "counter) def merge_sort(lst: list) -> list: \"\"\"Merge Sort implementation.\"\"\" counter = 0 result", "= len(lst) counter = 0 lst_copy = lst for i in range(length -", "insertion_sort(lst: list) -> list: \"\"\"Insertion Sort implementation.\"\"\" length = len(lst) lst_copy, counter =", "shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\" counter = 0 lst_copy = lst length =", "implementation.\"\"\" length = len(lst) lst_copy, counter = lst, 0 for i in range(1,", "range(i + 1, length): counter += 1 if lst_copy[min_index] > lst_copy[j]: min_index =", "new_list.extend(lst2[two:]) return (new_list, counter) def merge_sort(lst: list) -> list: \"\"\"Merge Sort implementation.\"\"\" counter", "two += 1 counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter) def merge_sort(lst:", "Sort implementation.\"\"\" counter = 0 result = [] lst_copy = lst for i", "result[i+1] new_list, num = (merge(lst1, lst2)) counter += num result.append(new_list) i += 2", "= 0 while i < len(result)-1: lst1 = result[i] lst2 = result[i+1] new_list,", "lst_copy[idx+1] = lst_copy[idx] idx -= 1 if idx >= 0: counter += 1", "length): temp = lst_copy[i] j = i counter += 1 while j >=", "Sort implementation.\"\"\" length = len(lst) lst_copy, counter = lst, 0 for i in", "= len(lst) lst_copy, counter = lst, 0 for i in range(1, length): curr_element", "- 1): min_index = i for j in range(i + 1, length): counter", "selection_sort(lst: list) -> list: \"\"\"Selection Sort implementation.\"\"\" length = len(lst) counter = 0", "0 
while i < len(result)-1: lst1 = result[i] lst2 = result[i+1] new_list, num", "while lst_copy[idx] > curr_element and idx >= 0: counter += 1 lst_copy[idx+1] =", "list: \"\"\"Insertion Sort implementation.\"\"\" length = len(lst) lst_copy, counter = lst, 0 for", "+= 1 while j >= interval and lst_copy[j-interval] > temp: if lst_copy[j-interval] >", "list: \"\"\"Merge Sort implementation.\"\"\" counter = 0 result = [] lst_copy = lst", "range(interval, length): temp = lst_copy[i] j = i counter += 1 while j", "counter = 0 new_list = [] one, two = 0, 0 while one", "idx >= 0: counter += 1 lst_copy[idx+1] = curr_element return counter def shell_sort(lst:", "> 0: for i in range(interval, length): temp = lst_copy[i] j = i", "= [] lst_copy = lst for i in range(len(lst_copy)): result.append([lst_copy[i]]) i = 0", "temp: counter += 1 lst_copy[j], lst_copy[j - interval] = lst_copy[j-interval], lst_copy[j] j -=", "result.append(new_list) i += 2 if len(result) != 0: lst_copy[:] = result[-1][:] return (lst_copy,", "lst_copy, counter = lst, 0 for i in range(1, length): curr_element = lst_copy[i]", "return counter def shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\" counter = 0 lst_copy =", "counter def shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\" counter = 0 lst_copy = lst", "lst length = len(lst_copy) interval = length // 2 while interval > 0:", "lst2)) counter += num result.append(new_list) i += 2 if len(result) != 0: lst_copy[:]", "new_list.append(lst2[two]) two += 1 counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter) def", "length = len(lst) lst_copy, counter = lst, 0 for i in range(1, length):", "counter = lst, 0 for i in range(1, length): curr_element = lst_copy[i] idx", "lst_copy[j]: min_index = j lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return counter def insertion_sort(lst:", "counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter) def 
merge_sort(lst: list) -> list:", "lst_copy = lst for i in range(length - 1): min_index = i for", "while one != len(lst1) and two != len(lst2): if lst1[one] <= lst2[two]: new_list.append(lst1[one])", "0 for i in range(1, length): curr_element = lst_copy[i] idx = i-1 if", "merge(lst1: list, lst2: list) -> list: \"\"\"Returns two lists merged into one.\"\"\" counter", "+= 1 lst_copy[idx+1] = curr_element return counter def shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\"", "lst_copy = lst for i in range(len(lst_copy)): result.append([lst_copy[i]]) i = 0 while i", "idx -= 1 if idx >= 0: counter += 1 lst_copy[idx+1] = curr_element", "> curr_element and idx >= 0): counter += 1 else: while lst_copy[idx] >", "one != len(lst1) and two != len(lst2): if lst1[one] <= lst2[two]: new_list.append(lst1[one]) one", "if not (lst_copy[idx] > curr_element and idx >= 0): counter += 1 else:", "counter += 1 lst_copy[idx+1] = curr_element return counter def shell_sort(lst: list): \"\"\"Shell sort", "lst1[one] <= lst2[two]: new_list.append(lst1[one]) one += 1 else: new_list.append(lst2[two]) two += 1 counter", "!= 0: lst_copy[:] = result[-1][:] return (lst_copy, counter) def selection_sort(lst: list) -> list:", "not (lst_copy[idx] > curr_element and idx >= 0): counter += 1 else: while", "0, 0 while one != len(lst1) and two != len(lst2): if lst1[one] <=", "one.\"\"\" counter = 0 new_list = [] one, two = 0, 0 while", "for i in range(length - 1): min_index = i for j in range(i", "- interval] = lst_copy[j-interval], lst_copy[j] j -= interval else: break lst_copy[j] = temp", "= lst_copy[i] idx = i-1 if not (lst_copy[idx] > curr_element and idx >=", "counter += 1 lst_copy[idx+1] = lst_copy[idx] idx -= 1 if idx >= 0:", "lst for i in range(length - 1): min_index = i for j in", "\"\"\"Returns two lists merged into one.\"\"\" counter = 0 new_list = [] one,", "for i in range(interval, length): temp = lst_copy[i] j = i counter +=", "-> list: \"\"\"Insertion Sort implementation.\"\"\" 
length = len(lst) lst_copy, counter = lst, 0", "curr_element and idx >= 0: counter += 1 lst_copy[idx+1] = lst_copy[idx] idx -=", "= length // 2 while interval > 0: for i in range(interval, length):", "lst_copy[idx] > curr_element and idx >= 0: counter += 1 lst_copy[idx+1] = lst_copy[idx]", "= lst_copy[idx] idx -= 1 if idx >= 0: counter += 1 lst_copy[idx+1]", "\"\"\"Shell sort implementation.\"\"\" counter = 0 lst_copy = lst length = len(lst_copy) interval", "idx >= 0): counter += 1 else: while lst_copy[idx] > curr_element and idx", "num result.append(new_list) i += 2 if len(result) != 0: lst_copy[:] = result[-1][:] return", "+= 1 counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter) def merge_sort(lst: list)", "in range(interval, length): temp = lst_copy[i] j = i counter += 1 while", "interval > 0: for i in range(interval, length): temp = lst_copy[i] j =", "else: new_list.append(lst2[two]) two += 1 counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter)", "1 else: new_list.append(lst2[two]) two += 1 counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list,", "lst for i in range(len(lst_copy)): result.append([lst_copy[i]]) i = 0 while i < len(result)-1:", "-> list: \"\"\"Selection Sort implementation.\"\"\" length = len(lst) counter = 0 lst_copy =", "i in range(length - 1): min_index = i for j in range(i +", "0 while one != len(lst1) and two != len(lst2): if lst1[one] <= lst2[two]:", "range(1, length): curr_element = lst_copy[i] idx = i-1 if not (lst_copy[idx] > curr_element", "1 lst_copy[idx+1] = lst_copy[idx] idx -= 1 if idx >= 0: counter +=", "1 lst_copy[idx+1] = curr_element return counter def shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\" counter", "len(lst2): if lst1[one] <= lst2[two]: new_list.append(lst1[one]) one += 1 else: new_list.append(lst2[two]) two +=", "counter = 0 result = [] lst_copy = lst for i in range(len(lst_copy)):", 
"1): min_index = i for j in range(i + 1, length): counter +=", "lst_copy[min_index] > lst_copy[j]: min_index = j lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return counter", "lst_copy[j-interval] > temp: counter += 1 lst_copy[j], lst_copy[j - interval] = lst_copy[j-interval], lst_copy[j]", "counter = 0 lst_copy = lst length = len(lst_copy) interval = length //", "-> list: \"\"\"Merge Sort implementation.\"\"\" counter = 0 result = [] lst_copy =", "lst1 = result[i] lst2 = result[i+1] new_list, num = (merge(lst1, lst2)) counter +=", "(lst_copy, counter) def selection_sort(lst: list) -> list: \"\"\"Selection Sort implementation.\"\"\" length = len(lst)", "length = len(lst_copy) interval = length // 2 while interval > 0: for", "counter) def selection_sort(lst: list) -> list: \"\"\"Selection Sort implementation.\"\"\" length = len(lst) counter", "lists merged into one.\"\"\" counter = 0 new_list = [] one, two =", "idx = i-1 if not (lst_copy[idx] > curr_element and idx >= 0): counter", "num = (merge(lst1, lst2)) counter += num result.append(new_list) i += 2 if len(result)", "merged into one.\"\"\" counter = 0 new_list = [] one, two = 0,", "curr_element = lst_copy[i] idx = i-1 if not (lst_copy[idx] > curr_element and idx", "0): counter += 1 else: while lst_copy[idx] > curr_element and idx >= 0:", "return counter def insertion_sort(lst: list) -> list: \"\"\"Insertion Sort implementation.\"\"\" length = len(lst)", "list): \"\"\"Shell sort implementation.\"\"\" counter = 0 lst_copy = lst length = len(lst_copy)", "len(lst_copy) interval = length // 2 while interval > 0: for i in", "1 lst_copy[j], lst_copy[j - interval] = lst_copy[j-interval], lst_copy[j] j -= interval else: break", "new_list = [] one, two = 0, 0 while one != len(lst1) and", "and lst_copy[j-interval] > temp: if lst_copy[j-interval] > temp: counter += 1 lst_copy[j], lst_copy[j", "def selection_sort(lst: list) -> list: \"\"\"Selection Sort implementation.\"\"\" length = len(lst) counter 
=", "(new_list, counter) def merge_sort(lst: list) -> list: \"\"\"Merge Sort implementation.\"\"\" counter = 0", "lst2[two]: new_list.append(lst1[one]) one += 1 else: new_list.append(lst2[two]) two += 1 counter += 1", "= i counter += 1 while j >= interval and lst_copy[j-interval] > temp:", "+= 1 lst_copy[j], lst_copy[j - interval] = lst_copy[j-interval], lst_copy[j] j -= interval else:", "list) -> list: \"\"\"Insertion Sort implementation.\"\"\" length = len(lst) lst_copy, counter = lst,", "i += 2 if len(result) != 0: lst_copy[:] = result[-1][:] return (lst_copy, counter)", "i in range(len(lst_copy)): result.append([lst_copy[i]]) i = 0 while i < len(result)-1: lst1 =", "i < len(result)-1: lst1 = result[i] lst2 = result[i+1] new_list, num = (merge(lst1,", "in range(length - 1): min_index = i for j in range(i + 1,", "counter = 0 lst_copy = lst for i in range(length - 1): min_index", "lst, 0 for i in range(1, length): curr_element = lst_copy[i] idx = i-1", "i in range(interval, length): temp = lst_copy[i] j = i counter += 1", "= lst_copy[j-interval], lst_copy[j] j -= interval else: break lst_copy[j] = temp interval =", "algorithms\"\"\" def merge(lst1: list, lst2: list) -> list: \"\"\"Returns two lists merged into", "= result[i+1] new_list, num = (merge(lst1, lst2)) counter += num result.append(new_list) i +=", "lst_copy[min_index], lst_copy[i] return counter def insertion_sort(lst: list) -> list: \"\"\"Insertion Sort implementation.\"\"\" length", "= curr_element return counter def shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\" counter = 0", "curr_element return counter def shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\" counter = 0 lst_copy", "merge_sort(lst: list) -> list: \"\"\"Merge Sort implementation.\"\"\" counter = 0 result = []", "0 result = [] lst_copy = lst for i in range(len(lst_copy)): result.append([lst_copy[i]]) i", "!= len(lst1) and two != len(lst2): if lst1[one] <= lst2[two]: new_list.append(lst1[one]) one +=", "len(lst1) 
and two != len(lst2): if lst1[one] <= lst2[two]: new_list.append(lst1[one]) one += 1", "if lst_copy[min_index] > lst_copy[j]: min_index = j lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return", "lst_copy[idx+1] = curr_element return counter def shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\" counter =", "interval else: break lst_copy[j] = temp interval = interval // 2 return counter", "return (new_list, counter) def merge_sort(lst: list) -> list: \"\"\"Merge Sort implementation.\"\"\" counter =", "new_list, num = (merge(lst1, lst2)) counter += num result.append(new_list) i += 2 if", ">= 0): counter += 1 else: while lst_copy[idx] > curr_element and idx >=", "+ 1, length): counter += 1 if lst_copy[min_index] > lst_copy[j]: min_index = j", "+= 1 lst_copy[idx+1] = lst_copy[idx] idx -= 1 if idx >= 0: counter", ">= 0: counter += 1 lst_copy[idx+1] = curr_element return counter def shell_sort(lst: list):", "[] one, two = 0, 0 while one != len(lst1) and two !=", "i = 0 while i < len(result)-1: lst1 = result[i] lst2 = result[i+1]", "> temp: if lst_copy[j-interval] > temp: counter += 1 lst_copy[j], lst_copy[j - interval]", "lst_copy[j], lst_copy[j - interval] = lst_copy[j-interval], lst_copy[j] j -= interval else: break lst_copy[j]", "-> list: \"\"\"Returns two lists merged into one.\"\"\" counter = 0 new_list =", "-= interval else: break lst_copy[j] = temp interval = interval // 2 return", "lst2: list) -> list: \"\"\"Returns two lists merged into one.\"\"\" counter = 0", "= 0 new_list = [] one, two = 0, 0 while one !=", "list: \"\"\"Selection Sort implementation.\"\"\" length = len(lst) counter = 0 lst_copy = lst", "into one.\"\"\" counter = 0 new_list = [] one, two = 0, 0", "range(length - 1): min_index = i for j in range(i + 1, length):", "lst_copy[:] = result[-1][:] return (lst_copy, counter) def selection_sort(lst: list) -> list: \"\"\"Selection Sort", "and two != len(lst2): if lst1[one] <= lst2[two]: new_list.append(lst1[one]) one += 
1 else:", "result[-1][:] return (lst_copy, counter) def selection_sort(lst: list) -> list: \"\"\"Selection Sort implementation.\"\"\" length", "length // 2 while interval > 0: for i in range(interval, length): temp", "= [] one, two = 0, 0 while one != len(lst1) and two", "lst2 = result[i+1] new_list, num = (merge(lst1, lst2)) counter += num result.append(new_list) i", "result = [] lst_copy = lst for i in range(len(lst_copy)): result.append([lst_copy[i]]) i =", "new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter) def merge_sort(lst: list) -> list: \"\"\"Merge Sort implementation.\"\"\"", "return (lst_copy, counter) def selection_sort(lst: list) -> list: \"\"\"Selection Sort implementation.\"\"\" length =", "counter += 1 while j >= interval and lst_copy[j-interval] > temp: if lst_copy[j-interval]", "result[i] lst2 = result[i+1] new_list, num = (merge(lst1, lst2)) counter += num result.append(new_list)", "lst_copy[j] j -= interval else: break lst_copy[j] = temp interval = interval //", "= lst, 0 for i in range(1, length): curr_element = lst_copy[i] idx =", "i-1 if not (lst_copy[idx] > curr_element and idx >= 0): counter += 1", "def shell_sort(lst: list): \"\"\"Shell sort implementation.\"\"\" counter = 0 lst_copy = lst length", "lst_copy[j - interval] = lst_copy[j-interval], lst_copy[j] j -= interval else: break lst_copy[j] =", "\"\"\"Merge Sort implementation.\"\"\" counter = 0 result = [] lst_copy = lst for", "\"\"\"sorting algorithms\"\"\" def merge(lst1: list, lst2: list) -> list: \"\"\"Returns two lists merged", "def insertion_sort(lst: list) -> list: \"\"\"Insertion Sort implementation.\"\"\" length = len(lst) lst_copy, counter", "def merge_sort(lst: list) -> list: \"\"\"Merge Sort implementation.\"\"\" counter = 0 result =", "= lst_copy[i] j = i counter += 1 while j >= interval and", "if lst_copy[j-interval] > temp: counter += 1 lst_copy[j], lst_copy[j - interval] = lst_copy[j-interval],", "range(len(lst_copy)): 
result.append([lst_copy[i]]) i = 0 while i < len(result)-1: lst1 = result[i] lst2", "counter += 1 else: while lst_copy[idx] > curr_element and idx >= 0: counter", "= (merge(lst1, lst2)) counter += num result.append(new_list) i += 2 if len(result) !=", "counter += 1 lst_copy[j], lst_copy[j - interval] = lst_copy[j-interval], lst_copy[j] j -= interval", "temp: if lst_copy[j-interval] > temp: counter += 1 lst_copy[j], lst_copy[j - interval] =", "<= lst2[two]: new_list.append(lst1[one]) one += 1 else: new_list.append(lst2[two]) two += 1 counter +=", "i for j in range(i + 1, length): counter += 1 if lst_copy[min_index]", "for j in range(i + 1, length): counter += 1 if lst_copy[min_index] >", "if len(result) != 0: lst_copy[:] = result[-1][:] return (lst_copy, counter) def selection_sort(lst: list)", "lst_copy[i] idx = i-1 if not (lst_copy[idx] > curr_element and idx >= 0):", "interval = length // 2 while interval > 0: for i in range(interval,", "list) -> list: \"\"\"Merge Sort implementation.\"\"\" counter = 0 result = [] lst_copy", "implementation.\"\"\" counter = 0 result = [] lst_copy = lst for i in", "sort implementation.\"\"\" counter = 0 lst_copy = lst length = len(lst_copy) interval =", "min_index = j lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return counter def insertion_sort(lst: list)", "1 counter += 1 new_list.extend(lst1[one:]) new_list.extend(lst2[two:]) return (new_list, counter) def merge_sort(lst: list) ->", "= 0 lst_copy = lst length = len(lst_copy) interval = length // 2", "= result[-1][:] return (lst_copy, counter) def selection_sort(lst: list) -> list: \"\"\"Selection Sort implementation.\"\"\"", "list) -> list: \"\"\"Selection Sort implementation.\"\"\" length = len(lst) counter = 0 lst_copy", "len(lst) counter = 0 lst_copy = lst for i in range(length - 1):", "= i for j in range(i + 1, length): counter += 1 if", "lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return counter def insertion_sort(lst: list) -> list: 
\"\"\"Insertion Sort", "lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return counter def insertion_sort(lst: list) -> list: \"\"\"Insertion", ">= 0: counter += 1 lst_copy[idx+1] = lst_copy[idx] idx -= 1 if idx", "j >= interval and lst_copy[j-interval] > temp: if lst_copy[j-interval] > temp: counter +=", "= lst for i in range(len(lst_copy)): result.append([lst_copy[i]]) i = 0 while i <", "+= num result.append(new_list) i += 2 if len(result) != 0: lst_copy[:] = result[-1][:]", "length): counter += 1 if lst_copy[min_index] > lst_copy[j]: min_index = j lst_copy[i], lst_copy[min_index]", "1 if lst_copy[min_index] > lst_copy[j]: min_index = j lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i]", "interval] = lst_copy[j-interval], lst_copy[j] j -= interval else: break lst_copy[j] = temp interval", "= 0 lst_copy = lst for i in range(length - 1): min_index =", "for i in range(len(lst_copy)): result.append([lst_copy[i]]) i = 0 while i < len(result)-1: lst1", "lst_copy[idx] idx -= 1 if idx >= 0: counter += 1 lst_copy[idx+1] =", "two lists merged into one.\"\"\" counter = 0 new_list = [] one, two", "i in range(1, length): curr_element = lst_copy[i] idx = i-1 if not (lst_copy[idx]", "j lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return counter def insertion_sort(lst: list) -> list:", "> lst_copy[j]: min_index = j lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return counter def", "0: counter += 1 lst_copy[idx+1] = curr_element return counter def shell_sort(lst: list): \"\"\"Shell", "< len(result)-1: lst1 = result[i] lst2 = result[i+1] new_list, num = (merge(lst1, lst2))", "lst_copy[j-interval] > temp: if lst_copy[j-interval] > temp: counter += 1 lst_copy[j], lst_copy[j -", "min_index = i for j in range(i + 1, length): counter += 1", "= lst length = len(lst_copy) interval = length // 2 while interval >", "= 0 result = [] lst_copy = lst for i in range(len(lst_copy)): result.append([lst_copy[i]])", 
"= i-1 if not (lst_copy[idx] > curr_element and idx >= 0): counter +=", "interval and lst_copy[j-interval] > temp: if lst_copy[j-interval] > temp: counter += 1 lst_copy[j],", "!= len(lst2): if lst1[one] <= lst2[two]: new_list.append(lst1[one]) one += 1 else: new_list.append(lst2[two]) two", "for i in range(1, length): curr_element = lst_copy[i] idx = i-1 if not", "in range(i + 1, length): counter += 1 if lst_copy[min_index] > lst_copy[j]: min_index", "\"\"\"Selection Sort implementation.\"\"\" length = len(lst) counter = 0 lst_copy = lst for", "list, lst2: list) -> list: \"\"\"Returns two lists merged into one.\"\"\" counter =", "if idx >= 0: counter += 1 lst_copy[idx+1] = curr_element return counter def", "= len(lst_copy) interval = length // 2 while interval > 0: for i", "while interval > 0: for i in range(interval, length): temp = lst_copy[i] j", "lst_copy[i] return counter def insertion_sort(lst: list) -> list: \"\"\"Insertion Sort implementation.\"\"\" length =", "(merge(lst1, lst2)) counter += num result.append(new_list) i += 2 if len(result) != 0:", "counter def insertion_sort(lst: list) -> list: \"\"\"Insertion Sort implementation.\"\"\" length = len(lst) lst_copy,", "if lst1[one] <= lst2[two]: new_list.append(lst1[one]) one += 1 else: new_list.append(lst2[two]) two += 1", "j = i counter += 1 while j >= interval and lst_copy[j-interval] >", "> temp: counter += 1 lst_copy[j], lst_copy[j - interval] = lst_copy[j-interval], lst_copy[j] j", "temp = lst_copy[i] j = i counter += 1 while j >= interval", "= lst_copy[min_index], lst_copy[i] return counter def insertion_sort(lst: list) -> list: \"\"\"Insertion Sort implementation.\"\"\"", "lst_copy = lst length = len(lst_copy) interval = length // 2 while interval", "0 new_list = [] one, two = 0, 0 while one != len(lst1)", "two != len(lst2): if lst1[one] <= lst2[two]: new_list.append(lst1[one]) one += 1 else: new_list.append(lst2[two])", "result.append([lst_copy[i]]) i = 0 while i < len(result)-1: lst1 = result[i] 
lst2 =", "implementation.\"\"\" length = len(lst) counter = 0 lst_copy = lst for i in", "idx >= 0: counter += 1 lst_copy[idx+1] = lst_copy[idx] idx -= 1 if", "else: while lst_copy[idx] > curr_element and idx >= 0: counter += 1 lst_copy[idx+1]", "[] lst_copy = lst for i in range(len(lst_copy)): result.append([lst_copy[i]]) i = 0 while", "counter += 1 if lst_copy[min_index] > lst_copy[j]: min_index = j lst_copy[i], lst_copy[min_index] =", "j in range(i + 1, length): counter += 1 if lst_copy[min_index] > lst_copy[j]:", "+= 1 else: while lst_copy[idx] > curr_element and idx >= 0: counter +=", "= j lst_copy[i], lst_copy[min_index] = lst_copy[min_index], lst_copy[i] return counter def insertion_sort(lst: list) ->", "= 0, 0 while one != len(lst1) and two != len(lst2): if lst1[one]" ]
[ "= os.getcwd() with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result = func(*args, **kwargs) os.chdir(cwd) return", "import tarfile def TemporaryDirectory(func): '''This decorator creates temporary directory and wraps given fuction'''", "result return wrapper def make_tarfile(output_filename, source_dir): with tarfile.open(output_filename, \"w:gz\") as tar: tar.add(source_dir, arcname=os.path.basename(source_dir))", "tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result = func(*args, **kwargs) os.chdir(cwd) return result return wrapper", "def wrapper(*args, **kwargs): cwd = os.getcwd() with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result =", "tmp_path: os.chdir(tmp_path) result = func(*args, **kwargs) os.chdir(cwd) return result return wrapper def make_tarfile(output_filename,", "func(*args, **kwargs) os.chdir(cwd) return result return wrapper def make_tarfile(output_filename, source_dir): with tarfile.open(output_filename, \"w:gz\")", "functools import wraps import os import tempfile import tarfile def TemporaryDirectory(func): '''This decorator", "tempfile import tarfile def TemporaryDirectory(func): '''This decorator creates temporary directory and wraps given", "os.getcwd() with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result = func(*args, **kwargs) os.chdir(cwd) return result", "'''This decorator creates temporary directory and wraps given fuction''' @wraps(func) def wrapper(*args, **kwargs):", "directory and wraps given fuction''' @wraps(func) def wrapper(*args, **kwargs): cwd = os.getcwd() with", "tarfile def TemporaryDirectory(func): '''This decorator creates temporary directory and wraps given fuction''' @wraps(func)", "os.chdir(tmp_path) result = func(*args, **kwargs) os.chdir(cwd) return result return wrapper def make_tarfile(output_filename, source_dir):", "import tempfile import tarfile def TemporaryDirectory(func): '''This decorator creates temporary directory and wraps", 
"**kwargs): cwd = os.getcwd() with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result = func(*args, **kwargs)", "return result return wrapper def make_tarfile(output_filename, source_dir): with tarfile.open(output_filename, \"w:gz\") as tar: tar.add(source_dir,", "fuction''' @wraps(func) def wrapper(*args, **kwargs): cwd = os.getcwd() with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path)", "wrapper(*args, **kwargs): cwd = os.getcwd() with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result = func(*args,", "**kwargs) os.chdir(cwd) return result return wrapper def make_tarfile(output_filename, source_dir): with tarfile.open(output_filename, \"w:gz\") as", "cwd = os.getcwd() with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result = func(*args, **kwargs) os.chdir(cwd)", "with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result = func(*args, **kwargs) os.chdir(cwd) return result return", "temporary directory and wraps given fuction''' @wraps(func) def wrapper(*args, **kwargs): cwd = os.getcwd()", "@wraps(func) def wrapper(*args, **kwargs): cwd = os.getcwd() with tempfile.TemporaryDirectory() as tmp_path: os.chdir(tmp_path) result", "wraps given fuction''' @wraps(func) def wrapper(*args, **kwargs): cwd = os.getcwd() with tempfile.TemporaryDirectory() as", "import wraps import os import tempfile import tarfile def TemporaryDirectory(func): '''This decorator creates", "creates temporary directory and wraps given fuction''' @wraps(func) def wrapper(*args, **kwargs): cwd =", "given fuction''' @wraps(func) def wrapper(*args, **kwargs): cwd = os.getcwd() with tempfile.TemporaryDirectory() as tmp_path:", "os.chdir(cwd) return result return wrapper def make_tarfile(output_filename, source_dir): with tarfile.open(output_filename, \"w:gz\") as tar:", "as tmp_path: os.chdir(tmp_path) result = func(*args, **kwargs) os.chdir(cwd) return result return wrapper def", "and wraps given fuction''' 
@wraps(func) def wrapper(*args, **kwargs): cwd = os.getcwd() with tempfile.TemporaryDirectory()", "wraps import os import tempfile import tarfile def TemporaryDirectory(func): '''This decorator creates temporary", "from functools import wraps import os import tempfile import tarfile def TemporaryDirectory(func): '''This", "import os import tempfile import tarfile def TemporaryDirectory(func): '''This decorator creates temporary directory", "os import tempfile import tarfile def TemporaryDirectory(func): '''This decorator creates temporary directory and", "TemporaryDirectory(func): '''This decorator creates temporary directory and wraps given fuction''' @wraps(func) def wrapper(*args,", "decorator creates temporary directory and wraps given fuction''' @wraps(func) def wrapper(*args, **kwargs): cwd", "= func(*args, **kwargs) os.chdir(cwd) return result return wrapper def make_tarfile(output_filename, source_dir): with tarfile.open(output_filename,", "result = func(*args, **kwargs) os.chdir(cwd) return result return wrapper def make_tarfile(output_filename, source_dir): with", "def TemporaryDirectory(func): '''This decorator creates temporary directory and wraps given fuction''' @wraps(func) def" ]
[ "IntEnum from abc import abstractmethod class PepsEnum(IntEnum): @property @abstractmethod def display_text(self): raise NotImplementedError('display_text", "import abstractmethod class PepsEnum(IntEnum): @property @abstractmethod def display_text(self): raise NotImplementedError('display_text method not implemented')", "enum import IntEnum from abc import abstractmethod class PepsEnum(IntEnum): @property @abstractmethod def display_text(self):", "from abc import abstractmethod class PepsEnum(IntEnum): @property @abstractmethod def display_text(self): raise NotImplementedError('display_text method", "abc import abstractmethod class PepsEnum(IntEnum): @property @abstractmethod def display_text(self): raise NotImplementedError('display_text method not", "from enum import IntEnum from abc import abstractmethod class PepsEnum(IntEnum): @property @abstractmethod def", "import IntEnum from abc import abstractmethod class PepsEnum(IntEnum): @property @abstractmethod def display_text(self): raise" ]
[]
[ "Software Foundation (ASF) under one ## or more contributor license agreements. See the", "), default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ),", "Case from django.db.models import When from django.db.models import Value from django.urls import reverse", "a queryset for all snapshots ordered by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate(", "for all snapshots ordered by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case(", "super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS =", "number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0),", "\"public\", \"retired\", \"internal\", \"deleted\") old_pk = models.CharField( max_length=24, null=True, blank=True, db_index=True ) #", "under the Apache License, Version 2.0 (the ## \"License\"); you may not use", "from .panel_types import PanelType class GenePanelManager(models.Manager): def get_panel(self, pk): if pk.isdigit(): return super().get_queryset().get(pk=pk)", "Licensed to the Apache Software Foundation (ASF) under one ## or more contributor", "(ASF) under one ## or more contributor license agreements. 
See the NOTICE file", "number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2,", "is part of PanelApp ## (see https://panelapp.genomicsengland.co.uk). ## ## Licensed to the Apache", "\"\"\"Return the panel with the largest version and related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(", "the NOTICE file ## distributed with this work for additional information ## regarding", "(the ## \"License\"); you may not use this file except in compliance ##", "self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return the panel with the largest version\"\"\" return self.genepanelsnapshot_set.order_by(", "## software distributed under the License is distributed on an ## \"AS IS\"", "\"level4title\") .annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum(", "this file except in compliance ## with the License. 
You may obtain a", "string\"\"\" major_version, minor_version = version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first() ) def", "string name = models.CharField(max_length=255, db_index=True) status = models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True )", "[GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return self.status == GenePanel.STATUS.deleted def reject(self): self.status = GenePanel.STATUS.internal", "django.db import models from django.db.models import Sum from django.db.models import Case from django.db.models", "models.CharField(max_length=255, db_index=True) status = models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True ) types = models.ManyToManyField(PanelType)", "( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\",", "the panel with the largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first()", "should be a string\"\"\" major_version, minor_version = version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version))", "= models.CharField(max_length=255, db_index=True) status = models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True ) types =", "get_panel_version(self, version): \"\"\"Get a specific version. Version argument should be a string\"\"\" major_version,", "## ## Copyright (c) 2016-2019 Genomics England Ltd. 
## ## This file is", "to you under the Apache License, Version 2.0 (the ## \"License\"); you may", "and limitations ## under the License. ## from django.db import models from django.db.models", "work for additional information ## regarding copyright ownership. The ASF licenses this file", "related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", )", "Mongo ObjectID hex string name = models.CharField(max_length=255, db_index=True) status = models.CharField( choices=STATUS, default=STATUS.internal,", "django.db.models import Value from django.urls import reverse from django.utils.functional import cached_property from model_utils", "minor_version = version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first() ) def add_activity(self, user,", "one ## or more contributor license agreements. See the NOTICE file ## distributed", "## ## This file is part of PanelApp ## (see https://panelapp.genomicsengland.co.uk). ## ##", "def approve(self): self.status = GenePanel.STATUS.public self.save() def is_approved(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted]", "agreements. 
See the NOTICE file ## distributed with this work for additional information", "return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\", \"public\",", "class GenePanelManager(models.Manager): def get_panel(self, pk): if pk.isdigit(): return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def", "\"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first() )", "return self.status == GenePanel.STATUS.deleted def reject(self): self.status = GenePanel.STATUS.internal self.save() def get_absolute_url(self): return", "approve(self): self.status = GenePanel.STATUS.public self.save() def is_approved(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def", "## or more contributor license agreements. 
See the NOTICE file ## distributed with", "Version 2.0 (the ## \"License\"); you may not use this file except in", "from django.db.models import Case from django.db.models import When from django.db.models import Value from", "output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case(", "law or agreed to in writing, ## software distributed under the License is", "unique_id(self): return self.old_pk if self.old_pk else str(self.pk) def approve(self): self.status = GenePanel.STATUS.public self.save()", "GenePanel.STATUS.deleted def reject(self): self.status = GenePanel.STATUS.internal self.save() def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def", "self.old_pk else str(self.pk) def approve(self): self.status = GenePanel.STATUS.public self.save() def is_approved(self): return self.status", "reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a queryset for all snapshots ordered by version\"\"\"", "from django.db.models import When from django.db.models import Value from django.urls import reverse from", "be a string\"\"\" major_version, minor_version = version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first()", "version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def unique_id(self): return self.old_pk if self.old_pk else str(self.pk)", "else str(self.pk) def approve(self): self.status = GenePanel.STATUS.public self.save() def is_approved(self): return self.status in", "pk.isdigit(): return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel):", "self.status == GenePanel.STATUS.deleted 
def reject(self): self.status = GenePanel.STATUS.internal self.save() def get_absolute_url(self): return reverse(\"panels:detail\",", "When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), )", "\"-modified\", \"-pk\") .first() ) def get_panel_version(self, version): \"\"\"Get a specific version. Version argument", "\"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def unique_id(self): return self.old_pk if self.old_pk else", "Version argument should be a string\"\"\" major_version, minor_version = version.split(\".\") return ( self._prepare_panel_query()", "in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return self.status == GenePanel.STATUS.deleted def reject(self): self.status =", "## Licensed to the Apache Software Foundation (ASF) under one ## or more", "def unique_id(self): return self.old_pk if self.old_pk else str(self.pk) def approve(self): self.status = GenePanel.STATUS.public", "\"-modified\", \"-pk\" ).first() @cached_property def active_panel_extra(self): \"\"\"Return the panel with the largest version", "License. 
You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0", "status = models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True ) types = models.ManyToManyField(PanelType) objects =", "from django.db.models import Sum from django.db.models import Case from django.db.models import When from", "models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True ) types = models.ManyToManyField(PanelType) objects = GenePanelManager() def", "pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\") old_pk", "When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), )", "django.db.models import Case from django.db.models import When from django.db.models import Value from django.urls", "Sum from django.db.models import Case from django.db.models import When from django.db.models import Value", "from django.urls import reverse from django.utils.functional import cached_property from model_utils import Choices from", "import PanelType class GenePanelManager(models.Manager): def get_panel(self, pk): if pk.isdigit(): return super().get_queryset().get(pk=pk) else: return", "else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\",", "version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property def active_panel_extra(self): \"\"\"Return the", "= models.ManyToManyField(PanelType) objects = GenePanelManager() def __str__(self): ap = self.active_panel return \"{} 
version", "if pk.isdigit(): return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return self.get_panel(pk).active_panel class", "License. ## from django.db import models from django.db.models import Sum from django.db.models import", "applicable law or agreed to in writing, ## software distributed under the License", "not use this file except in compliance ## with the License. You may", "model_utils.models import TimeStampedModel from .panel_types import PanelType class GenePanelManager(models.Manager): def get_panel(self, pk): if", "file ## to you under the Apache License, Version 2.0 (the ## \"License\");", "from django.db import models from django.db.models import Sum from django.db.models import Case from", "NOTICE file ## distributed with this work for additional information ## regarding copyright", "2016-2019 Genomics England Ltd. ## ## This file is part of PanelApp ##", "License is distributed on an ## \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "models.CharField( max_length=24, null=True, blank=True, db_index=True ) # Mongo ObjectID hex string name =", "information ## regarding copyright ownership. The ASF licenses this file ## to you", "def _prepare_panel_query(self): \"\"\"Returns a queryset for all snapshots ordered by version\"\"\" return (", "regarding copyright ownership. 
The ASF licenses this file ## to you under the", "output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case(", "See the NOTICE file ## distributed with this work for additional information ##", "a string\"\"\" major_version, minor_version = version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first() )", "clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return the panel with the", "is distributed on an ## \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "KIND, either express or implied. See the License for the ## specific language", "import TimeStampedModel from .panel_types import PanelType class GenePanelManager(models.Manager): def get_panel(self, pk): if pk.isdigit():", "= GenePanelManager() def __str__(self): ap = self.active_panel return \"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version)", "contributor license agreements. See the NOTICE file ## distributed with this work for", "## under the License. ## from django.db import models from django.db.models import Sum", "Genomics England Ltd. ## ## This file is part of PanelApp ## (see", "def get_panel(self, pk): if pk.isdigit(): return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk):", "return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first() ) def add_activity(self, user, text, entity=None): \"\"\"Adds", "use this file except in compliance ## with the License. 
You may obtain", "the Apache License, Version 2.0 (the ## \"License\"); you may not use this", "## \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ## KIND, either", "a specific version. Version argument should be a string\"\"\" major_version, minor_version = version.split(\".\")", ") def add_activity(self, user, text, entity=None): \"\"\"Adds activity for this panel\"\"\" self.active_panel.add_activity(user, text)", "for additional information ## regarding copyright ownership. The ASF licenses this file ##", "\"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first() ) def get_panel_version(self,", "= models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True ) types = models.ManyToManyField(PanelType) objects = GenePanelManager()", "return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\",", "Copyright (c) 2016-2019 Genomics England Ltd. ## ## This file is part of", "import When from django.db.models import Value from django.urls import reverse from django.utils.functional import", "WITHOUT WARRANTIES OR CONDITIONS OF ANY ## KIND, either express or implied. 
See", "return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def", "GenePanel.STATUS.internal self.save() def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a queryset for", "\"\"\"Return the panel with the largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\"", "a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required", "( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first() ) def add_activity(self, user, text, entity=None): \"\"\"Adds activity", ") .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first() ) def get_panel_version(self, version): \"\"\"Get a specific", "or more contributor license agreements. 
See the NOTICE file ## distributed with this", "Apache License, Version 2.0 (the ## \"License\"); you may not use this file", ".order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") ) def clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def", "from django.utils.functional import cached_property from model_utils import Choices from model_utils.models import TimeStampedModel from", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ## KIND, either express or implied.", "and related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\",", "agreed to in writing, ## software distributed under the License is distributed on", "all snapshots ordered by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case( When(", "## \"License\"); you may not use this file except in compliance ## with", "old_pk = models.CharField( max_length=24, null=True, blank=True, db_index=True ) # Mongo ObjectID hex string", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ## KIND, either express", "by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ),", "output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\",", "panel with the largest version and related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\",", "@property def unique_id(self): 
return self.old_pk if self.old_pk else str(self.pk) def approve(self): self.status =", "<filename>panelapp/panels/models/genepanel.py ## ## Copyright (c) 2016-2019 Genomics England Ltd. ## ## This file", "from model_utils import Choices from model_utils.models import TimeStampedModel from .panel_types import PanelType class", "info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\",", "self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return self.status == GenePanel.STATUS.deleted def reject(self): self.status", "\"deleted\") old_pk = models.CharField( max_length=24, null=True, blank=True, db_index=True ) # Mongo ObjectID hex", ") ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\", \"-minor_version\",", "default=STATUS.internal, max_length=36, db_index=True ) types = models.ManyToManyField(PanelType) objects = GenePanelManager() def __str__(self): ap", "## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in", "limitations ## under the License. 
## from django.db import models from django.db.models import", "active_panel(self): \"\"\"Return the panel with the largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\",", "args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a queryset for all snapshots ordered by version\"\"\" return", "super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\", \"public\", \"retired\",", "hex string name = models.CharField(max_length=255, db_index=True) status = models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True", "( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(), )", "@cached_property def active_panel_extra(self): \"\"\"Return the panel with the largest version and related info\"\"\"", "self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self):", "file ## distributed with this work for additional information ## regarding copyright ownership.", "file except in compliance ## with the License. 
You may obtain a copy", "## from django.db import models from django.db.models import Sum from django.db.models import Case", "the Apache Software Foundation (ASF) under one ## or more contributor license agreements.", "When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") ) def", "self.status = GenePanel.STATUS.public self.save() def is_approved(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self):", "express or implied. See the License for the ## specific language governing permissions", "governing permissions and limitations ## under the License. ## from django.db import models", "specific version. Version argument should be a string\"\"\" major_version, minor_version = version.split(\".\") return", "the panel with the largest version and related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\",", "[GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return self.status", "\"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property def active_panel_extra(self): \"\"\"Return the panel with the largest", "snapshots ordered by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3,", "Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") )", "= self.active_panel return \"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def unique_id(self): return self.old_pk", "return ( 
self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(),", "then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ),", "= version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first() ) def add_activity(self, user, text,", "return \"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def unique_id(self): return self.old_pk if self.old_pk", "\"-pk\") .first() ) def get_panel_version(self, version): \"\"\"Get a specific version. Version argument should", "if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return the panel with the largest", "\"-minor_version\", \"-modified\", \"-pk\") .first() ) def get_panel_version(self, version): \"\"\"Get a specific version. Version", "PanelApp ## (see https://panelapp.genomicsengland.co.uk). ## ## Licensed to the Apache Software Foundation (ASF)", "ANY ## KIND, either express or implied. See the License for the ##", "GenePanelManager(models.Manager): def get_panel(self, pk): if pk.isdigit(): return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self,", ".first() ) def get_panel_version(self, version): \"\"\"Get a specific version. Version argument should be", "OF ANY ## KIND, either express or implied. 
See the License for the", "## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to", "PanelType class GenePanelManager(models.Manager): def get_panel(self, pk): if pk.isdigit(): return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk)", "pk): if pk.isdigit(): return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return self.get_panel(pk).active_panel", "may not use this file except in compliance ## with the License. You", "def get_panel_version(self, version): \"\"\"Get a specific version. Version argument should be a string\"\"\"", "GenePanel.STATUS.public self.save() def is_approved(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return self.status", "Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(),", "in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return", "is_deleted(self): return self.status == GenePanel.STATUS.deleted def reject(self): self.status = GenePanel.STATUS.internal self.save() def get_absolute_url(self):", "Value from django.urls import reverse from django.utils.functional import cached_property from model_utils import Choices", "https://panelapp.genomicsengland.co.uk). 
## ## Licensed to the Apache Software Foundation (ASF) under one ##", "self.active_panel return \"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def unique_id(self): return self.old_pk if", "output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") ) def clear_cache(self): if self.active_panel:", "you may not use this file except in compliance ## with the License.", "## with the License. You may obtain a copy of the License at", "_prepare_panel_query(self): \"\"\"Returns a queryset for all snapshots ordered by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\",", ").first() @cached_property def active_panel_extra(self): \"\"\"Return the panel with the largest version and related", "default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), )", "of PanelApp ## (see https://panelapp.genomicsengland.co.uk). ## ## Licensed to the Apache Software Foundation", "self.save() def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a queryset for all", "distributed under the License is distributed on an ## \"AS IS\" BASIS, WITHOUT", "\"-pk\") ) def clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return the", "you under the Apache License, Version 2.0 (the ## \"License\"); you may not", "return self.old_pk if self.old_pk else str(self.pk) def approve(self): self.status = GenePanel.STATUS.public self.save() def", "You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ##", "permissions and limitations ## under the License. 
## from django.db import models from", "types = models.ManyToManyField(PanelType) objects = GenePanelManager() def __str__(self): ap = self.active_panel return \"{}", "of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable", "software distributed under the License is distributed on an ## \"AS IS\" BASIS,", "is_approved(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted]", "genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(),", "django.utils.functional import cached_property from model_utils import Choices from model_utils.models import TimeStampedModel from .panel_types", "django.db.models import When from django.db.models import Value from django.urls import reverse from django.utils.functional", "default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum(", "\"-pk\" ).first() @cached_property def active_panel_extra(self): \"\"\"Return the panel with the largest version and", "return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property def active_panel_extra(self): \"\"\"Return the panel", "or agreed to in writing, ## software distributed under the License is distributed", "default=Value(0), output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") ) def clear_cache(self): if", "self._prepare_panel_query() .filter(major_version=int(major_version), 
minor_version=int(minor_version)) .first() ) def add_activity(self, user, text, entity=None): \"\"\"Adds activity for", "ap.minor_version) @property def unique_id(self): return self.old_pk if self.old_pk else str(self.pk) def approve(self): self.status", "models.ManyToManyField(PanelType) objects = GenePanelManager() def __str__(self): ap = self.active_panel return \"{} version {}.{}\".format(self.name,", "number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\")", "ordered by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1),", "License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or", "either express or implied. See the License for the ## specific language governing", "http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing,", "def __str__(self): ap = self.active_panel return \"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def", "return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a queryset for all snapshots ordered by", "return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\") old_pk =", "self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\") old_pk = models.CharField(", "= GenePanel.STATUS.public self.save() def is_approved(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return", "GenePanel(TimeStampedModel): 
STATUS = Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\") old_pk = models.CharField( max_length=24, null=True,", "= models.CharField( max_length=24, null=True, blank=True, db_index=True ) # Mongo ObjectID hex string name", "STATUS = Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\") old_pk = models.CharField( max_length=24, null=True, blank=True,", "to in writing, ## software distributed under the License is distributed on an", "(c) 2016-2019 Genomics England Ltd. ## ## This file is part of PanelApp", "def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a queryset for all snapshots", "with this work for additional information ## regarding copyright ownership. The ASF licenses", "\"-modified\", \"-pk\") ) def clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return", "__str__(self): ap = self.active_panel return \"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def unique_id(self):", "for the ## specific language governing permissions and limitations ## under the License.", "Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)),", "with the largest version and related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\",", "is_public(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return self.status == GenePanel.STATUS.deleted def", "active_panel_extra(self): \"\"\"Return the panel with the largest version and related info\"\"\" return (", "License, Version 2.0 (the ## \"License\"); you may not use this file except", 
"\"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first() ) def get_panel_version(self, version): \"\"\"Get", "obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless", "self.status = GenePanel.STATUS.internal self.save() def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a", "blank=True, db_index=True ) # Mongo ObjectID hex string name = models.CharField(max_length=255, db_index=True) status", "ObjectID hex string name = models.CharField(max_length=255, db_index=True) status = models.CharField( choices=STATUS, default=STATUS.internal, max_length=36,", "required by applicable law or agreed to in writing, ## software distributed under", "the License for the ## specific language governing permissions and limitations ## under", "distributed with this work for additional information ## regarding copyright ownership. The ASF", "import cached_property from model_utils import Choices from model_utils.models import TimeStampedModel from .panel_types import", ".filter(major_version=int(major_version), minor_version=int(minor_version)) .first() ) def add_activity(self, user, text, entity=None): \"\"\"Adds activity for this", "See the License for the ## specific language governing permissions and limitations ##", ") def get_panel_version(self, version): \"\"\"Get a specific version. Version argument should be a", "from model_utils.models import TimeStampedModel from .panel_types import PanelType class GenePanelManager(models.Manager): def get_panel(self, pk):", "def active_panel(self): \"\"\"Return the panel with the largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\",", "## distributed with this work for additional information ## regarding copyright ownership. 
The", "objects = GenePanelManager() def __str__(self): ap = self.active_panel return \"{} version {}.{}\".format(self.name, ap.major_version,", "Foundation (ASF) under one ## or more contributor license agreements. See the NOTICE", "the License is distributed on an ## \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)),", "by applicable law or agreed to in writing, ## software distributed under the", "the License. You may obtain a copy of the License at ## ##", "implied. See the License for the ## specific language governing permissions and limitations", "self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return the panel with the largest version\"\"\"", "= Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\") old_pk = models.CharField( max_length=24, null=True, blank=True, db_index=True", "db_index=True ) types = models.ManyToManyField(PanelType) objects = GenePanelManager() def __str__(self): ap = self.active_panel", "Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\") old_pk = models.CharField( max_length=24, null=True, blank=True, db_index=True )", ".panel_types import PanelType class GenePanelManager(models.Manager): def get_panel(self, pk): if pk.isdigit(): return super().get_queryset().get(pk=pk) else:", "reverse from django.utils.functional import cached_property from model_utils import Choices from model_utils.models import TimeStampedModel", "License for the ## specific language governing permissions and limitations ## under the", "Apache Software Foundation (ASF) under one ## or more contributor license agreements. 
See", "import Value from django.urls import reverse from django.utils.functional import cached_property from model_utils import", "more contributor license agreements. See the NOTICE file ## distributed with this work", "queryset for all snapshots ordered by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum(", "major_version, minor_version = version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first() ) def add_activity(self,", "), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") ) def clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"]", "the largest version and related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\",", "distributed on an ## \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "language governing permissions and limitations ## under the License. ## from django.db import", "import reverse from django.utils.functional import cached_property from model_utils import Choices from model_utils.models import", "copyright ownership. The ASF licenses this file ## to you under the Apache", "under one ## or more contributor license agreements. See the NOTICE file ##", ") def clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return the panel", "then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") ) def clear_cache(self):", "\"internal\", \"deleted\") old_pk = models.CharField( max_length=24, null=True, blank=True, db_index=True ) # Mongo ObjectID", "this work for additional information ## regarding copyright ownership. 
The ASF licenses this", "license agreements. See the NOTICE file ## distributed with this work for additional", "at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed", ".annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case(", "def get_active_panel(self, pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\", \"public\", \"retired\", \"internal\",", "from django.db.models import Value from django.urls import reverse from django.utils.functional import cached_property from", "version. Version argument should be a string\"\"\" major_version, minor_version = version.split(\".\") return (", "## regarding copyright ownership. The ASF licenses this file ## to you under", "str(self.pk) def approve(self): self.status = GenePanel.STATUS.public self.save() def is_approved(self): return self.status in [GenePanel.STATUS.public,", "Unless required by applicable law or agreed to in writing, ## software distributed", "\"-minor_version\", \"-modified\", \"-pk\") ) def clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def active_panel(self):", "## to you under the Apache License, Version 2.0 (the ## \"License\"); you", "null=True, blank=True, db_index=True ) # Mongo ObjectID hex string name = models.CharField(max_length=255, db_index=True)", "def active_panel_extra(self): \"\"\"Return the panel with the largest version and related info\"\"\" return", "in compliance ## with the License. You may obtain a copy of the", "OR CONDITIONS OF ANY ## KIND, either express or implied. 
See the License", "class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\") old_pk = models.CharField( max_length=24,", "with the largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property def", "## This file is part of PanelApp ## (see https://panelapp.genomicsengland.co.uk). ## ## Licensed", "self.save() def is_approved(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return self.status in", "## Copyright (c) 2016-2019 Genomics England Ltd. ## ## This file is part", "choices=STATUS, default=STATUS.internal, max_length=36, db_index=True ) types = models.ManyToManyField(PanelType) objects = GenePanelManager() def __str__(self):", "copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by", "under the License. ## from django.db import models from django.db.models import Sum from", "reject(self): self.status = GenePanel.STATUS.internal self.save() def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns", ") ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0,", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ## KIND, either express or", "or implied. 
See the License for the ## specific language governing permissions and", "GenePanel.STATUS.promoted] def is_public(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return self.status ==", "max_length=36, db_index=True ) types = models.ManyToManyField(PanelType) objects = GenePanelManager() def __str__(self): ap =", "\"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first() ) def", "), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\",", "models from django.db.models import Sum from django.db.models import Case from django.db.models import When", "## ## Unless required by applicable law or agreed to in writing, ##", "return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return self.status == GenePanel.STATUS.deleted def reject(self):", "), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)),", "django.db.models import Sum from django.db.models import Case from django.db.models import When from django.db.models", "with the License. 
You may obtain a copy of the License at ##", "ap = self.active_panel return \"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def unique_id(self): return", "get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a queryset for all snapshots ordered", ") ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1,", ") ), ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") ) def clear_cache(self): if self.active_panel: del", "number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0),", "## KIND, either express or implied. See the License for the ## specific", "WARRANTIES OR CONDITIONS OF ANY ## KIND, either express or implied. See the", "this file ## to you under the Apache License, Version 2.0 (the ##", ") .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") ) def clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property", "self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(), ) ),", "except in compliance ## with the License. 
You may obtain a copy of", "the largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property def active_panel_extra(self):", "## specific language governing permissions and limitations ## under the License. ## from", "def is_public(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_deleted(self): return self.status == GenePanel.STATUS.deleted", "default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum(", "version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version), minor_version=int(minor_version)) .first() ) def add_activity(self, user, text, entity=None):", "cached_property from model_utils import Choices from model_utils.models import TimeStampedModel from .panel_types import PanelType", "under the License is distributed on an ## \"AS IS\" BASIS, WITHOUT WARRANTIES", "the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law", "max_length=24, null=True, blank=True, db_index=True ) # Mongo ObjectID hex string name = models.CharField(max_length=255,", "== GenePanel.STATUS.deleted def reject(self): self.status = GenePanel.STATUS.internal self.save() def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,))", "name = models.CharField(max_length=255, db_index=True) status = models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True ) types", "version): \"\"\"Get a specific version. Version argument should be a string\"\"\" major_version, minor_version", "(see https://panelapp.genomicsengland.co.uk). 
## ## Licensed to the Apache Software Foundation (ASF) under one", "self.old_pk if self.old_pk else str(self.pk) def approve(self): self.status = GenePanel.STATUS.public self.save() def is_approved(self):", "largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property def active_panel_extra(self): \"\"\"Return", "GenePanelManager() def __str__(self): ap = self.active_panel return \"{} version {}.{}\".format(self.name, ap.major_version, ap.minor_version) @property", "db_index=True ) # Mongo ObjectID hex string name = models.CharField(max_length=255, db_index=True) status =", "This file is part of PanelApp ## (see https://panelapp.genomicsengland.co.uk). ## ## Licensed to", "2.0 (the ## \"License\"); you may not use this file except in compliance", "When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0),", "specific language governing permissions and limitations ## under the License. ## from django.db", "file is part of PanelApp ## (see https://panelapp.genomicsengland.co.uk). 
## ## Licensed to the", "\"License\"); you may not use this file except in compliance ## with the", "del self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return the panel with the largest version\"\"\" return", "in writing, ## software distributed under the License is distributed on an ##", "to the Apache Software Foundation (ASF) under one ## or more contributor license", "model_utils import Choices from model_utils.models import TimeStampedModel from .panel_types import PanelType class GenePanelManager(models.Manager):", "def clear_cache(self): if self.active_panel: del self.__dict__[\"active_panel\"] @cached_property def active_panel(self): \"\"\"Return the panel with", "import models from django.db.models import Sum from django.db.models import Case from django.db.models import", "then=Value(1), ), default=Value(0), output_field=models.IntegerField(), ) ), number_of_amber_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), )", "Choices from model_utils.models import TimeStampedModel from .panel_types import PanelType class GenePanelManager(models.Manager): def get_panel(self,", "import Choices from model_utils.models import TimeStampedModel from .panel_types import PanelType class GenePanelManager(models.Manager): def", "GenePanel.STATUS.promoted] def is_deleted(self): return self.status == GenePanel.STATUS.deleted def reject(self): self.status = GenePanel.STATUS.internal self.save()", "= GenePanel.STATUS.internal self.save() def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self): \"\"\"Returns a queryset", "the ## specific language governing permissions and limitations ## under the License. 
##", "self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property def active_panel_extra(self): \"\"\"Return the panel with", "django.urls import reverse from django.utils.functional import cached_property from model_utils import Choices from model_utils.models", "if self.old_pk else str(self.pk) def approve(self): self.status = GenePanel.STATUS.public self.save() def is_approved(self): return", "import Sum from django.db.models import Case from django.db.models import When from django.db.models import", "\"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first() ) def get_panel_version(self, version): \"\"\"Get a", "version and related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\",", "{}.{}\".format(self.name, ap.major_version, ap.minor_version) @property def unique_id(self): return self.old_pk if self.old_pk else str(self.pk) def", "then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_gray_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=0, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ),", "minor_version=int(minor_version)) .first() ) def add_activity(self, user, text, entity=None): \"\"\"Adds activity for this panel\"\"\"", "## ## Licensed to the Apache Software Foundation (ASF) under one ## or", "self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", 
\"-pk\")", "def is_approved(self): return self.status in [GenePanel.STATUS.public, GenePanel.STATUS.promoted] def is_public(self): return self.status in [GenePanel.STATUS.public,", "panel with the largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property", "licenses this file ## to you under the Apache License, Version 2.0 (the", ") # Mongo ObjectID hex string name = models.CharField(max_length=255, db_index=True) status = models.CharField(", "an ## \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ## KIND,", "get_panel(self, pk): if pk.isdigit(): return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return", "def reject(self): self.status = GenePanel.STATUS.internal self.save() def get_absolute_url(self): return reverse(\"panels:detail\", args=(self.pk,)) def _prepare_panel_query(self):", "\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\" ).first() @cached_property def active_panel_extra(self): \"\"\"Return the panel with the", "ownership. The ASF licenses this file ## to you under the Apache License,", "\"\"\"Returns a queryset for all snapshots ordered by version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\")", "db_index=True) status = models.CharField( choices=STATUS, default=STATUS.internal, max_length=36, db_index=True ) types = models.ManyToManyField(PanelType) objects", ".first() ) def add_activity(self, user, text, entity=None): \"\"\"Adds activity for this panel\"\"\" self.active_panel.add_activity(user,", "additional information ## regarding copyright ownership. 
The ASF licenses this file ## to", "ap.major_version, ap.minor_version) @property def unique_id(self): return self.old_pk if self.old_pk else str(self.pk) def approve(self):", "When from django.db.models import Value from django.urls import reverse from django.utils.functional import cached_property", "## (see https://panelapp.genomicsengland.co.uk). ## ## Licensed to the Apache Software Foundation (ASF) under", "def is_deleted(self): return self.status == GenePanel.STATUS.deleted def reject(self): self.status = GenePanel.STATUS.internal self.save() def", "part of PanelApp ## (see https://panelapp.genomicsengland.co.uk). ## ## Licensed to the Apache Software", "CONDITIONS OF ANY ## KIND, either express or implied. See the License for", "get_active_panel(self, pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS = Choices(\"promoted\", \"public\", \"retired\", \"internal\", \"deleted\")", ".order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first() ) def get_panel_version(self, version): \"\"\"Get a specific version.", "\"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first() ) def get_panel_version(self, version):", "\"\"\"Get a specific version. 
Version argument should be a string\"\"\" major_version, minor_version =", "may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ##", "\"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\", \"genepanelentrysnapshot_set__gene_core\", \"genepanelentrysnapshot_set__evaluation__comments\", ) .order_by(\"-major_version\", \"-minor_version\", \"-modified\", \"-pk\") .first()", "# Mongo ObjectID hex string name = models.CharField(max_length=255, db_index=True) status = models.CharField( choices=STATUS,", "compliance ## with the License. You may obtain a copy of the License", ") types = models.ManyToManyField(PanelType) objects = GenePanelManager() def __str__(self): ap = self.active_panel return", "@cached_property def active_panel(self): \"\"\"Return the panel with the largest version\"\"\" return self.genepanelsnapshot_set.order_by( \"-major_version\",", "import Case from django.db.models import When from django.db.models import Value from django.urls import", "writing, ## software distributed under the License is distributed on an ## \"AS", "\"retired\", \"internal\", \"deleted\") old_pk = models.CharField( max_length=24, null=True, blank=True, db_index=True ) # Mongo", "Ltd. ## ## This file is part of PanelApp ## (see https://panelapp.genomicsengland.co.uk). ##", "England Ltd. 
## ## This file is part of PanelApp ## (see https://panelapp.genomicsengland.co.uk).", "argument should be a string\"\"\" major_version, minor_version = version.split(\".\") return ( self._prepare_panel_query() .filter(major_version=int(major_version),", "## Unless required by applicable law or agreed to in writing, ## software", "return super().get_queryset().get(pk=pk) else: return super().get_queryset().get(old_pk=pk) def get_active_panel(self, pk): return self.get_panel(pk).active_panel class GenePanel(TimeStampedModel): STATUS", "Case( When(genepanelentrysnapshot__saved_gel_status=2, then=Value(1)), default=Value(0), output_field=models.IntegerField(), ) ), number_of_red_genes=Sum( Case( When(genepanelentrysnapshot__saved_gel_status=1, then=Value(1)), default=Value(0), output_field=models.IntegerField(),", "the License. ## from django.db import models from django.db.models import Sum from django.db.models", "on an ## \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY ##", "ASF licenses this file ## to you under the Apache License, Version 2.0", "The ASF licenses this file ## to you under the Apache License, Version", "largest version and related info\"\"\" return ( self.genepanelsnapshot_set.prefetch_related( \"panel\", \"level4title\", \"genepanelentrysnapshot_set\", \"genepanelentrysnapshot_set__tags\", \"genepanelentrysnapshot_set__evidence\",", "version\"\"\" return ( self.genepanelsnapshot_set.prefetch_related(\"panel\", \"level4title\") .annotate( number_of_green_genes=Sum( Case( When( genepanelentrysnapshot__saved_gel_status__gt=3, then=Value(1), ), default=Value(0),", "TimeStampedModel from .panel_types import PanelType class GenePanelManager(models.Manager): def get_panel(self, pk): if pk.isdigit(): return" ]
[ "import reverse from series_tiempo_ar_api.apps.api.tests.endpoint_tests.endpoint_test_case import EndpointTestCase class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'),", "self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id, 'start': 5, 'limit': 5} resp", "resp = self.run_query(data) data = [ ['1999-06-01', 105], ['1999-07-01', 106], ['1999-08-01', 107], ['1999-09-01',", "= self.run_query(data) data = [ ['1999-06-01', 105], ['1999-07-01', 106], ['1999-08-01', 107], ['1999-09-01', 108],", "class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1)", "self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id,", "data = {'ids': self.increasing_month_series_id, 'start': 5, 'limit': 5} resp = self.run_query(data) data =", "'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id, 'start': 5, 'limit':", "EndpointTestCase class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']),", "from django.urls import reverse from series_tiempo_ar_api.apps.api.tests.endpoint_tests.endpoint_test_case import EndpointTestCase class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp", "1) def test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id, 'start': 5, 'limit': 5} resp =", 
"django.urls import reverse from series_tiempo_ar_api.apps.api.tests.endpoint_tests.endpoint_test_case import EndpointTestCase class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp =", "data={'ids': self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id, 'start':", "data = [ ['1999-06-01', 105], ['1999-07-01', 106], ['1999-08-01', 107], ['1999-09-01', 108], ['1999-10-01', 109],", "def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self):", "import EndpointTestCase class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit': 1})", "'limit': 5} resp = self.run_query(data) data = [ ['1999-06-01', 105], ['1999-07-01', 106], ['1999-08-01',", "['1999-06-01', 105], ['1999-07-01', 106], ['1999-08-01', 107], ['1999-09-01', 108], ['1999-10-01', 109], ] self.assertEqual(resp['data'], data)", "def test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id, 'start': 5, 'limit': 5} resp = self.run_query(data)", "5} resp = self.run_query(data) data = [ ['1999-06-01', 105], ['1999-07-01', 106], ['1999-08-01', 107],", "self.increasing_month_series_id, 'start': 5, 'limit': 5} resp = self.run_query(data) data = [ ['1999-06-01', 105],", "5, 'limit': 5} resp = self.run_query(data) data = [ ['1999-06-01', 105], ['1999-07-01', 106],", "from series_tiempo_ar_api.apps.api.tests.endpoint_tests.endpoint_test_case import EndpointTestCase class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id,", "[ ['1999-06-01', 105], 
['1999-07-01', 106], ['1999-08-01', 107], ['1999-09-01', 108], ['1999-10-01', 109], ] self.assertEqual(resp['data'],", "resp = self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data =", "{'ids': self.increasing_month_series_id, 'start': 5, 'limit': 5} resp = self.run_query(data) data = [ ['1999-06-01',", "= self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data = {'ids':", "self.run_query(data) data = [ ['1999-06-01', 105], ['1999-07-01', 106], ['1999-08-01', 107], ['1999-09-01', 108], ['1999-10-01',", "self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id, 'start': 5,", "reverse from series_tiempo_ar_api.apps.api.tests.endpoint_tests.endpoint_test_case import EndpointTestCase class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids':", "test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data", "= {'ids': self.increasing_month_series_id, 'start': 5, 'limit': 5} resp = self.run_query(data) data = [", "series_tiempo_ar_api.apps.api.tests.endpoint_tests.endpoint_test_case import EndpointTestCase class PaginationTests(EndpointTestCase): def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids': self.increasing_month_series_id, 'limit':", "PaginationTests(EndpointTestCase): def test_get_single_value(self): resp = self.client.get(reverse('api:series:series'), data={'ids': 
self.increasing_month_series_id, 'limit': 1}) self.assertEqual(len(resp.json()['data']), 1) def", "test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id, 'start': 5, 'limit': 5} resp = self.run_query(data) data", "= [ ['1999-06-01', 105], ['1999-07-01', 106], ['1999-08-01', 107], ['1999-09-01', 108], ['1999-10-01', 109], ]", "1}) self.assertEqual(len(resp.json()['data']), 1) def test_get_five_offset_values(self): data = {'ids': self.increasing_month_series_id, 'start': 5, 'limit': 5}", "'start': 5, 'limit': 5} resp = self.run_query(data) data = [ ['1999-06-01', 105], ['1999-07-01'," ]
[ "1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer,", "for images, labels in dataset_test: outputs_dist, z_dist, z = model(images, labels) loss =", "consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters params = Params() print('params:', params)", "os.makedirs(args.job_dir) # enable eager execution tf.enable_eager_execution() # set random seeds for consistent execution", "= tf.keras.datasets.mnist.load_data() # prepare the images by casting and rescaling images_train = prep_images(images_train)", "print('params:', params) # load MNIST dataset ((images_train, labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data() #", "labels in dataset_test: outputs_dist, z_dist, z = model(images, labels) loss = losses.variational(outputs_dist, z_dist,", "parser.parse_args() print('args:', args) # create a job directory if it doesn't already exist", "batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset( (images_test, labels_test), batch_size=params.batch_size) # model / optimization global_step", "attr.ib(default=1024) def main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int) args =", "statistics from the training set images_loc = images_train.mean() images_scale = images_train.std() # define", "execution tf.enable_eager_execution() # set random seeds for consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) #", "# checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path", "<gh_stars>1-10 import os import attr import random import argparse import numpy as np", 
"# define datasets for sampling batches dataset_train = get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True)", "tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, step=global_step) pbar.set_description('loss (train): {},", "parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int) args = parser.parse_args() print('args:', args)", "= zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image(", "# compute statistics from the training set images_loc = images_train.mean() images_scale = images_train.std()", "labels) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval',", "import os import attr import random import argparse import numpy as np import", "batch_size=params.batch_size) # model / optimization global_step = tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model =", "tfe import tensorflow_probability as tfp from tqdm import trange from vae import losses", "# model / optimization global_step = tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model(", "/ optimization global_step = tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model( inputs_loc=images_loc, inputs_scale=images_scale,", "with tf.contrib.summary.always_record_summaries(): loss_test.result() 
tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, step=global_step)", "model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm',", "tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval') for images, labels in dataset_test: outputs_dist, z_dist,", "loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images,", "import attr import random import argparse import numpy as np import tensorflow as", "= attr.ib(default=100) batch_size = attr.ib(default=1024) def main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed',", "random import argparse import numpy as np import tensorflow as tf import tensorflow.contrib.eager", "random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters params = Params() print('params:', params) # load", "random seeds for consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters params =", "z = model( images, labels, training=True) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss)", "model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images,", "= parser.parse_args() 
print('args:', args) # create a job directory if it doesn't already", "labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data() # prepare the images by casting and rescaling", "and rescaling images_train = prep_images(images_train) images_test = prep_images(images_test) # compute statistics from the", "loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss) grads = tape.gradient(loss, model.trainable_variables) grads_and_vars =", "np import tensorflow as tf import tensorflow.contrib.eager as tfe import tensorflow_probability as tfp", "images_loc = images_train.mean() images_scale = images_train.std() # define datasets for sampling batches dataset_train", "as pbar: for epoch in pbar: loss_train = tfe.metrics.Mean(name='loss/train') for images, labels in", "define datasets for sampling batches dataset_train = get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test", "labels_test), batch_size=params.batch_size) # model / optimization global_step = tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model", "images_scale = images_train.std() # define datasets for sampling batches dataset_train = get_dataset( (images_train,", "not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with", "step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval') for images, labels in dataset_test: outputs_dist, z_dist, z =", "os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager execution tf.enable_eager_execution() # set random seeds for consistent", "for sampling batches dataset_train = get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset(", "tensorflow.contrib.eager as tfe import 
tensorflow_probability as tfp from tqdm import trange from vae", "args = parser.parse_args() print('args:', args) # create a job directory if it doesn't", "= losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss) grads = tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads,", "loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1,", "= tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as pbar: for epoch in", "vae.data import prep_images, get_dataset from vae.model import Model @attr.s class Params: \"\"\" Container", "tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(),", "{}'.format( loss_train.result().numpy(), loss_test.result().numpy())) checkpoint_prefix = os.path.join(args.job_dir, 'ckpt') checkpoint.save(checkpoint_prefix) if __name__ == '__main__': main()", "by casting and rescaling images_train = prep_images(images_train) images_test = prep_images(images_test) # compute statistics", "dataset_test: outputs_dist, z_dist, z = model(images, labels) loss = losses.variational(outputs_dist, z_dist, images, latent_prior)", "summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as pbar: for epoch", "= tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, 
global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result()", "Params() print('params:', params) # load MNIST dataset ((images_train, labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data()", "for hyperparameters. \"\"\" learning_rate = attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size = attr.ib(default=1024) def", "None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs)", "global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer", "for images, labels in dataset_train: with tf.GradientTape() as tape: outputs_dist, z_dist, z =", "params = Params() print('params:', params) # load MNIST dataset ((images_train, labels_train), (images_test, labels_test))", "labels_test)) = tf.keras.datasets.mnist.load_data() # prepare the images by casting and rescaling images_train =", "labels, training=True) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss) grads = tape.gradient(loss, model.trainable_variables)", "= tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step)", "loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, step=global_step) pbar.set_description('loss (train):", "inputs_shape=[28, 28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints 
checkpoint =", "(images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset( (images_test, labels_test), batch_size=params.batch_size) # model /", "= attr.ib(default=1024) def main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int) args", "z_dist, z = model( images, labels, training=True) loss = losses.variational(outputs_dist, z_dist, images, latent_prior)", "rescaling images_train = prep_images(images_train) images_test = prep_images(images_test) # compute statistics from the training", "loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1,", "prep_images(images_train) images_test = prep_images(images_test) # compute statistics from the training set images_loc =", "# load MNIST dataset ((images_train, labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data() # prepare the", "it doesn't already exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager execution tf.enable_eager_execution()", "= get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset( (images_test, labels_test), batch_size=params.batch_size) #", "# create a job directory if it doesn't already exist if not os.path.exists(args.job_dir):", "latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model,", "name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test = 
tfe.metrics.Mean(name='loss/eval') for", "step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval') for images, labels in", "type=int) args = parser.parse_args() print('args:', args) # create a job directory if it", "model / optimization global_step = tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model( inputs_loc=images_loc,", "tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, step=global_step) pbar.set_description('loss", "optimization global_step = tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28,", "for consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters params = Params() print('params:',", "from vae.model import Model @attr.s class Params: \"\"\" Container for hyperparameters. \"\"\" learning_rate", "class Params: \"\"\" Container for hyperparameters. 
\"\"\" learning_rate = attr.ib(default=1e-3) epochs = attr.ib(default=100)", "# enable eager execution tf.enable_eager_execution() # set random seeds for consistent execution random.seed(args.seed)", "dataset_train = get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset( (images_test, labels_test), batch_size=params.batch_size)", "= attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size = attr.ib(default=1024) def main(): parser = argparse.ArgumentParser()", "name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, step=global_step) pbar.set_description('loss (train): {}, loss", "max_images=1, step=global_step) pbar.set_description('loss (train): {}, loss (eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy())) checkpoint_prefix = os.path.join(args.job_dir,", "(images_test, labels_test), batch_size=params.batch_size) # model / optimization global_step = tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate)", "name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test", "dataset_test = get_dataset( (images_test, labels_test), batch_size=params.batch_size) # model / optimization global_step = tf.train.get_or_create_global_step()", "attr import random import argparse import numpy as np import tensorflow as tf", "vae import losses from vae.data import prep_images, get_dataset from vae.model import Model @attr.s", "images, labels in dataset_test: outputs_dist, z_dist, z = model(images, labels) loss = losses.variational(outputs_dist,", "z = model(images, labels) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) 
loss_test(loss) with tf.contrib.summary.always_record_summaries():", "model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries", "((images_train, labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data() # prepare the images by casting and", "tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar(", "= tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer = tf.contrib.summary.create_file_writer(", "global_step = tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28,", "enable eager execution tf.enable_eager_execution() # set random seeds for consistent execution random.seed(args.seed) np.random.seed(args.seed)", "tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval') for images, labels in dataset_test:", "tensorflow as tf import tensorflow.contrib.eager as tfe import tensorflow_probability as tfp from tqdm", "with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train',", "pbar.set_description('loss (train): {}, loss (eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy())) checkpoint_prefix = os.path.join(args.job_dir, 
'ckpt') checkpoint.save(checkpoint_prefix)", "for epoch in pbar: loss_train = tfe.metrics.Mean(name='loss/train') for images, labels in dataset_train: with", "eager execution tf.enable_eager_execution() # set random seeds for consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed)", "from tqdm import trange from vae import losses from vae.data import prep_images, get_dataset", "execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters params = Params() print('params:', params) #", "with tf.GradientTape() as tape: outputs_dist, z_dist, z = model( images, labels, training=True) loss", "as tfp from tqdm import trange from vae import losses from vae.data import", "= prep_images(images_train) images_test = prep_images(images_test) # compute statistics from the training set images_loc", "import numpy as np import tensorflow as tf import tensorflow.contrib.eager as tfe import", "= Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0)", "if checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1,", "model = Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32),", "= model(images, labels) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result()", "checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is", "print('args:', args) # create a job directory 
if it doesn't already exist if", "learning_rate = attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size = attr.ib(default=1024) def main(): parser =", "default=67, type=int) args = parser.parse_args() print('args:', args) # create a job directory if", "params) # load MNIST dataset ((images_train, labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data() # prepare", "= images_train.std() # define datasets for sampling batches dataset_train = get_dataset( (images_train, labels_train),", "outputs_dist, z_dist, z = model( images, labels, training=True) loss = losses.variational(outputs_dist, z_dist, images,", "images by casting and rescaling images_train = prep_images(images_train) images_test = prep_images(images_test) # compute", "prep_images(images_test) # compute statistics from the training set images_loc = images_train.mean() images_scale =", "grads_and_vars, global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step)", "# summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as pbar:", "labels_train), batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset( (images_test, labels_test), batch_size=params.batch_size) # model / optimization", "import prep_images, get_dataset from vae.model import Model @attr.s class Params: \"\"\" Container for", "optimizer.apply_gradients( grads_and_vars, global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1,", "tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = 
Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2],", "tfp from tqdm import trange from vae import losses from vae.data import prep_images,", "directory if it doesn't already exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager", "seeds for consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters params = Params()", "latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(),", "numpy as np import tensorflow as tf import tensorflow.contrib.eager as tfe import tensorflow_probability", "import trange from vae import losses from vae.data import prep_images, get_dataset from vae.model", "optimizer=optimizer, model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed() #", "loss_test = tfe.metrics.Mean(name='loss/eval') for images, labels in dataset_test: outputs_dist, z_dist, z = model(images,", "import tensorflow.contrib.eager as tfe import tensorflow_probability as tfp from tqdm import trange from", "checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not", "tqdm import trange from vae import losses from vae.data import prep_images, get_dataset from", "# set random seeds for consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters", "zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step) with 
tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train',", "args) # create a job directory if it doesn't already exist if not", "= tfe.metrics.Mean(name='loss/eval') for images, labels in dataset_test: outputs_dist, z_dist, z = model(images, labels)", "dataset ((images_train, labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data() # prepare the images by casting", "exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager execution tf.enable_eager_execution() # set random", "loss_train(loss) grads = tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step) with", "trange(params.epochs) as pbar: for epoch in pbar: loss_train = tfe.metrics.Mean(name='loss/train') for images, labels", "doesn't already exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager execution tf.enable_eager_execution() #", "args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as pbar: for epoch in pbar: loss_train", "epochs = attr.ib(default=100) batch_size = attr.ib(default=1024) def main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True)", "\"\"\" learning_rate = attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size = attr.ib(default=1024) def main(): parser", "the training set images_loc = images_train.mean() images_scale = images_train.std() # define datasets for", "z_dist, images, latent_prior) loss_train(loss) grads = tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients(", "load MNIST dataset ((images_train, labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data() # prepare the 
images", "casting and rescaling images_train = prep_images(images_train) images_test = prep_images(images_test) # compute statistics from", "grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads))", "name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval') for images, labels in dataset_test: outputs_dist,", "create a job directory if it doesn't already exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir)", "attr.ib(default=100) batch_size = attr.ib(default=1024) def main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67,", "= losses.variational(outputs_dist, z_dist, images, latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1,", "tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior", "np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters params = Params() print('params:', params) # load MNIST", "optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior =", "step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, step=global_step) pbar.set_description('loss (train): {}, loss (eval): {}'.format( loss_train.result().numpy(),", "compute statistics from the training set images_loc = images_train.mean() images_scale = 
images_train.std() #", "with trange(params.epochs) as pbar: for epoch in pbar: loss_train = tfe.metrics.Mean(name='loss/train') for images,", "attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size = attr.ib(default=1024) def main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir',", "summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as pbar: for", "losses.variational(outputs_dist, z_dist, images, latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step)", "= tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not None:", "tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed()", "model( images, labels, training=True) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss) grads =", "hyperparameters. \"\"\" learning_rate = attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size = attr.ib(default=1024) def main():", "import Model @attr.s class Params: \"\"\" Container for hyperparameters. \"\"\" learning_rate = attr.ib(default=1e-3)", "argparse import numpy as np import tensorflow as tf import tensorflow.contrib.eager as tfe", "tf.enable_eager_execution() # set random seeds for consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define", "get_dataset from vae.model import Model @attr.s class Params: \"\"\" Container for hyperparameters. 
\"\"\"", "= tf.train.get_or_create_global_step() optimizer = tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1])", "set images_loc = images_train.mean() images_scale = images_train.std() # define datasets for sampling batches", "tensorflow_probability as tfp from tqdm import trange from vae import losses from vae.data", "# define hyperparameters params = Params() print('params:', params) # load MNIST dataset ((images_train,", "tensor=outputs_dist.mean(), max_images=1, step=global_step) pbar.set_description('loss (train): {}, loss (eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy())) checkpoint_prefix =", "z_dist, images, latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image(", "if it doesn't already exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager execution", "checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer =", "set random seeds for consistent execution random.seed(args.seed) np.random.seed(args.seed) tf.set_random_seed(args.seed) # define hyperparameters params", "import random import argparse import numpy as np import tensorflow as tf import", "get_dataset( (images_test, labels_test), batch_size=params.batch_size) # model / optimization global_step = tf.train.get_or_create_global_step() optimizer =", "tf.keras.datasets.mnist.load_data() # prepare the images by casting and rescaling images_train = prep_images(images_train) images_test", "epoch in pbar: loss_train = tfe.metrics.Mean(name='loss/train') for images, labels in dataset_train: with tf.GradientTape()", "trange from vae import losses from vae.data import prep_images, 
get_dataset from vae.model import", "from vae import losses from vae.data import prep_images, get_dataset from vae.model import Model", "latent_prior) loss_train(loss) grads = tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step)", "tf import tensorflow.contrib.eager as tfe import tensorflow_probability as tfp from tqdm import trange", "checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000)", "= prep_images(images_test) # compute statistics from the training set images_loc = images_train.mean() images_scale", "a job directory if it doesn't already exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) #", "images_train.std() # define datasets for sampling batches dataset_train = get_dataset( (images_train, labels_train), batch_size=params.batch_size,", "z_dist, z = model(images, labels) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_test(loss) with", "tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test =", "tf.GradientTape() as tape: outputs_dist, z_dist, z = model( images, labels, training=True) loss =", "= get_dataset( (images_test, labels_test), batch_size=params.batch_size) # model / optimization global_step = tf.train.get_or_create_global_step() optimizer", "model(images, labels) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image(", "tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), 
max_images=1, step=global_step) pbar.set_description('loss (train): {}, loss (eval):", "= model( images, labels, training=True) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss) grads", "step=global_step) pbar.set_description('loss (train): {}, loss (eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy())) checkpoint_prefix = os.path.join(args.job_dir, 'ckpt')", "as tf import tensorflow.contrib.eager as tfe import tensorflow_probability as tfp from tqdm import", "tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval')", "max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval') for images, labels", "\"\"\" Container for hyperparameters. 
\"\"\" learning_rate = attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size =", "main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int) args = parser.parse_args() print('args:',", "Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) #", "tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as pbar: for epoch in pbar:", "parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int) args = parser.parse_args() print('args:', args) # create a", "batch_size = attr.ib(default=1024) def main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int)", "name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, step=global_step) pbar.set_description('loss (train): {}, loss (eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy())) checkpoint_prefix", "Container for hyperparameters. 
\"\"\" learning_rate = attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size = attr.ib(default=1024)", "sampling batches dataset_train = get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset( (images_test,", "checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as", "= Params() print('params:', params) # load MNIST dataset ((images_train, labels_train), (images_test, labels_test)) =", "import argparse import numpy as np import tensorflow as tf import tensorflow.contrib.eager as", "images, labels, training=True) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss) grads = tape.gradient(loss,", "images_train = prep_images(images_train) images_test = prep_images(images_test) # compute statistics from the training set", "tf.set_random_seed(args.seed) # define hyperparameters params = Params() print('params:', params) # load MNIST dataset", "max_images=1, step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval') for images, labels in dataset_test: outputs_dist, z_dist, z", "summary_writer.set_as_default() with trange(params.epochs) as pbar: for epoch in pbar: loss_train = tfe.metrics.Mean(name='loss/train') for", "in dataset_test: outputs_dist, z_dist, z = model(images, labels) loss = losses.variational(outputs_dist, z_dist, images,", "pbar: loss_train = tfe.metrics.Mean(name='loss/train') for images, labels in dataset_train: with tf.GradientTape() as tape:", "max_queue=1, flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as pbar: for epoch in pbar: loss_train =", "loss_train = tfe.metrics.Mean(name='loss/train') for images, labels in dataset_train: with tf.GradientTape() as tape: outputs_dist,", "vae.model import Model @attr.s class Params: \"\"\" Container for 
hyperparameters. \"\"\" learning_rate =", "training set images_loc = images_train.mean() images_scale = images_train.std() # define datasets for sampling", "= tf.train.AdamOptimizer(learning_rate=params.learning_rate) model = Model( inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag(", "as np import tensorflow as tf import tensorflow.contrib.eager as tfe import tensorflow_probability as", "tape: outputs_dist, z_dist, z = model( images, labels, training=True) loss = losses.variational(outputs_dist, z_dist,", "is not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir, max_queue=1, flush_millis=1000) summary_writer.set_as_default()", "already exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager execution tf.enable_eager_execution() # set", "(eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy())) checkpoint_prefix = os.path.join(args.job_dir, 'ckpt') checkpoint.save(checkpoint_prefix) if __name__ == '__main__':", "tfe.metrics.Mean(name='loss/eval') for images, labels in dataset_test: outputs_dist, z_dist, z = model(images, labels) loss", "Params: \"\"\" Container for hyperparameters. 
\"\"\" learning_rate = attr.ib(default=1e-3) epochs = attr.ib(default=100) batch_size", "job directory if it doesn't already exist if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable", "inputs_loc=images_loc, inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints", "argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int) args = parser.parse_args() print('args:', args) # create", "MNIST dataset ((images_train, labels_train), (images_test, labels_test)) = tf.keras.datasets.mnist.load_data() # prepare the images by", "prep_images, get_dataset from vae.model import Model @attr.s class Params: \"\"\" Container for hyperparameters.", "images, labels in dataset_train: with tf.GradientTape() as tape: outputs_dist, z_dist, z = model(", "dataset_train: with tf.GradientTape() as tape: outputs_dist, z_dist, z = model( images, labels, training=True)", "required=True) parser.add_argument('--seed', default=67, type=int) args = parser.parse_args() print('args:', args) # create a job", "images_train.mean() images_scale = images_train.std() # define datasets for sampling batches dataset_train = get_dataset(", "= argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int) args = parser.parse_args() print('args:', args) #", "loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path =", "scale_identity_multiplier=1.0) # checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir) if", "(train): {}, loss (eval): {}'.format( loss_train.result().numpy(), 
loss_test.result().numpy())) checkpoint_prefix = os.path.join(args.job_dir, 'ckpt') checkpoint.save(checkpoint_prefix) if", "def main(): parser = argparse.ArgumentParser() parser.add_argument('--job-dir', required=True) parser.add_argument('--seed', default=67, type=int) args = parser.parse_args()", "hyperparameters params = Params() print('params:', params) # load MNIST dataset ((images_train, labels_train), (images_test,", "tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step) loss_test = tfe.metrics.Mean(name='loss/eval') for images,", "flush_millis=1000) summary_writer.set_as_default() with trange(params.epochs) as pbar: for epoch in pbar: loss_train = tfe.metrics.Mean(name='loss/train')", "as tape: outputs_dist, z_dist, z = model( images, labels, training=True) loss = losses.variational(outputs_dist,", "datasets for sampling batches dataset_train = get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test =", "images, latent_prior) loss_train(loss) grads = tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars,", "define hyperparameters params = Params() print('params:', params) # load MNIST dataset ((images_train, labels_train),", "@attr.s class Params: \"\"\" Container for hyperparameters. 
\"\"\" learning_rate = attr.ib(default=1e-3) epochs =", "grads = tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables) optimizer.apply_gradients( grads_and_vars, global_step=global_step) with tf.contrib.summary.always_record_summaries():", "import losses from vae.data import prep_images, get_dataset from vae.model import Model @attr.s class", "losses from vae.data import prep_images, get_dataset from vae.model import Model @attr.s class Params:", "from the training set images_loc = images_train.mean() images_scale = images_train.std() # define datasets", "images_test = prep_images(images_test) # compute statistics from the training set images_loc = images_train.mean()", "prepare the images by casting and rescaling images_train = prep_images(images_train) images_test = prep_images(images_test)", "in dataset_train: with tf.GradientTape() as tape: outputs_dist, z_dist, z = model( images, labels,", "not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager execution tf.enable_eager_execution() # set random seeds for", "batches dataset_train = get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset( (images_test, labels_test),", "training=True) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss) grads = tape.gradient(loss, model.trainable_variables) grads_and_vars", "in pbar: loss_train = tfe.metrics.Mean(name='loss/train') for images, labels in dataset_train: with tf.GradientTape() as", "tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, step=global_step) pbar.set_description('loss (train): {}, loss (eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy()))", "import tensorflow as tf import tensorflow.contrib.eager as tfe import tensorflow_probability as tfp from", "loss (eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy())) 
checkpoint_prefix = os.path.join(args.job_dir, 'ckpt') checkpoint.save(checkpoint_prefix) if __name__ ==", "get_dataset( (images_train, labels_train), batch_size=params.batch_size, shuffle=True) dataset_test = get_dataset( (images_test, labels_test), batch_size=params.batch_size) # model", "import tensorflow_probability as tfp from tqdm import trange from vae import losses from", "os import attr import random import argparse import numpy as np import tensorflow", "tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/train', tensor=outputs_dist.mean(), max_images=1, step=global_step)", "{}, loss (eval): {}'.format( loss_train.result().numpy(), loss_test.result().numpy())) checkpoint_prefix = os.path.join(args.job_dir, 'ckpt') checkpoint.save(checkpoint_prefix) if __name__", "Model @attr.s class Params: \"\"\" Container for hyperparameters. 
\"\"\" learning_rate = attr.ib(default=1e-3) epochs", "28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints checkpoint = tf.train.Checkpoint(", "dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path = tf.train.latest_checkpoint(args.job_dir)", "pbar: for epoch in pbar: loss_train = tfe.metrics.Mean(name='loss/train') for images, labels in dataset_train:", "= tfe.metrics.Mean(name='loss/train') for images, labels in dataset_train: with tf.GradientTape() as tape: outputs_dist, z_dist,", "tfe.metrics.Mean(name='loss/train') for images, labels in dataset_train: with tf.GradientTape() as tape: outputs_dist, z_dist, z", "parser.add_argument('--seed', default=67, type=int) args = parser.parse_args() print('args:', args) # create a job directory", "(images_test, labels_test)) = tf.keras.datasets.mnist.load_data() # prepare the images by casting and rescaling images_train", "global_step=global_step) with tf.contrib.summary.always_record_summaries(): loss_train.result() tf.contrib.summary.scalar( name='grad_norm', tensor=tf.global_norm(grads)) tf.contrib.summary.image( name='image/train', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image(", "from vae.data import prep_images, get_dataset from vae.model import Model @attr.s class Params: \"\"\"", "labels in dataset_train: with tf.GradientTape() as tape: outputs_dist, z_dist, z = model( images,", "if not os.path.exists(args.job_dir): os.makedirs(args.job_dir) # enable eager execution tf.enable_eager_execution() # set random seeds", "outputs_dist, z_dist, z = model(images, labels) loss = losses.variational(outputs_dist, z_dist, images, latent_prior) loss_test(loss)", "max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval', tensor=outputs_dist.mean(), max_images=1, 
step=global_step) pbar.set_description('loss (train): {}, loss (eval): {}'.format(", "inputs_scale=images_scale, inputs_shape=[28, 28, 1]) latent_prior = tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints checkpoint", "losses.variational(outputs_dist, z_dist, images, latent_prior) loss_train(loss) grads = tape.gradient(loss, model.trainable_variables) grads_and_vars = zip(grads, model.trainable_variables)", "# prepare the images by casting and rescaling images_train = prep_images(images_train) images_test =", "tf.train.latest_checkpoint(args.job_dir) if checkpoint_path is not None: checkpoint.restore(checkpoint_path).assert_consumed() # summaries summary_writer = tf.contrib.summary.create_file_writer( args.job_dir,", "shuffle=True) dataset_test = get_dataset( (images_test, labels_test), batch_size=params.batch_size) # model / optimization global_step =", "tfp.distributions.MultivariateNormalDiag( loc=tf.zeros(shape=[2], dtype=tf.float32), scale_identity_multiplier=1.0) # checkpoints checkpoint = tf.train.Checkpoint( optimizer=optimizer, model=model, global_step=global_step) checkpoint_path", "as tfe import tensorflow_probability as tfp from tqdm import trange from vae import", "= images_train.mean() images_scale = images_train.std() # define datasets for sampling batches dataset_train =", "the images by casting and rescaling images_train = prep_images(images_train) images_test = prep_images(images_test) #", "images, latent_prior) loss_test(loss) with tf.contrib.summary.always_record_summaries(): loss_test.result() tf.contrib.summary.image( name='image/eval', tensor=images, max_images=1, step=global_step) tf.contrib.summary.image( name='outputs/eval'," ]
[ "The 2D midpoints are converted into 3D midpoints since # it is easier", "an example how to use this function. >>> import base64 >>> from possum", "((float, float, float), (float, float, float)), ...} And now it it a time", "the vtk module is not loaded. :param point_list: List of points to turn", "structure or outside the structure. Therefore some of the results may look wired", "the middle midpoints of the labelled imags. The term 'middle midpoints' is used", "segmentation # 4) Pick the maximum of the distance transform for given segmentation", "vtk.vtkVersion() except: return None n_points = len(points_list.keys()) points = vtk.vtkPoints() vertices = vtk.vtkCellArray()", "in a slightly different way for # two dimensional results and slightly different", "and to have a dimensionality of two or three. Images having different properties", "midpoints of the labels in the image. :rtype: {int: ((float, float, float), (float,", "which has been removed. The overall idea of this loop is to: 1)", "voxels should be returned, but no. It is unknown is such centre would", "midpoints[1] == ((5.0, 0.0, 0.0), (5, 0, 0)) True >>> type(midpoints[30][0][1]) == type(1)", "is more than one pixels with the maximum value of the distance transform,", "type(midpoints[30][1][1]) == type(1) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0)", "the distance transform, location of the first one is returned. One could think", "itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is where we'll collect the results.", "everyday # use. # print middle_points.__repr__() return middle_points def points_to_vtk_points(points_list): \"\"\" The function", "to be of `uchar` or `ushort` type, to have a single component and", "size and dimensionality as the labelled image. 
# The differe is in data", "points :type point_list: {int: ((float, float, float), (float, float, float)), ...} :return: Midpoints", "0)) True >>> type(midpoints[30][1][1]) == type(1) True >>> type(midpoints[30][0][1]) == type(1) False >>>", "# which are apparently incomparibile with python in type. # Consider it a", "a lot of explicit casting assure types # compatibility. The 2D midpoints are", ">>> midpoints[30] == ((0.0, 39.0, 0), (0, 39, 0)) True >>> type(midpoints[30][1][1]) ==", "labelled image in which individual discrete values correspond to individual structures. Formally this", "is not the centroids what is calculated here. Anyway, this function calculated middle", "three. Images having different properties will not be processed. :type itk_image: `itk.Image` :return:", "really important for everyday # use. # print middle_points.__repr__() return middle_points def points_to_vtk_points(points_list):", "structure. Therefore some of the results may look wired but they are actually", "are returned as longints # which are apparently incomparibile with python in type.", "image only unsigned_char\\ and unsigned_short are accepted.\") # t_label_img is the ITK image", "results. We collect, both, the physical # location as well as the middle_points", "map(int, midpoints[1][0]) == [14, 0, 0] True >>> map(int, midpoints[21][0]) == [14, 24,", "= \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute()", "import os import sys import itk from possum import pos_itk_core from possum import", "might be multiple disjoint regions colored with given label 3) Apply the distance", "used in filters # templates. 
t_label_img = itk_image.__class__ # We'll be also using", ">>> len(midpoints.keys()) == 63 True >>> str(midpoints.get(0,None)) == \"None\" True >>> midpoints[1] ==", "for a labelled image only unsigned_char\\ and unsigned_short are accepted.\") # t_label_img is", "1) Extract given label from the segmentation 2) Extract the largest patch of", "a different # way. Our center midpoints cannot be called centroids. for label_idx", "32, 33] True >>> map(int, midpoints[1][0]) == [14, 0, 0] True >>> map(int,", "((15.0, 15.0, 15.0), (15, 15, 15)) True >>> midpoints[111] == ((5.0, 5.0, 9.0),", "List of points to turn into vtk points :type point_list: {int: ((float, float,", "centre of a particular structure. ... note :: This function will not work", "label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the details of the image provided", "os import sys import itk from possum import pos_itk_core from possum import pos_itk_transforms", "You might think that we're calculating centroids here, but not. I use the", "colored with given label # 3) Apply the distance transform to the largest", "it is easier to use them in vtk if they're 3D midpoints. if", "vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt,", "actually ok. :param itk_image: Labelled image, the image is expected to be a", ":return: Midpoints of the individual structures expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try:", "point_list: List of points to turn into vtk points :type point_list: {int: ((float,", "a slightly different way for # two dimensional results and slightly different for", "will be disabled. 
\"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This function introduces a workflow for", ">>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys())", "in this module require vtk module to be installed. If it is not", "13, 20, 21, 22, 23, 30, 31, 32, 33] True >>> map(int, midpoints[1][0])", "== 1, \\ \"Only single component images are allowed.\" assert data_type in [\"unsigned_char\",", "39, 0)) True >>> type(midpoints[30][1][1]) == type(1) True >>> type(midpoints[30][0][1]) == type(1) False", "segmentation # 2) Extract the largest patch of the segmentation as there #", "= itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) # We", "since # it is easier to use them in vtk if they're 3D", "loaded. :param point_list: List of points to turn into vtk points :type point_list:", "calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints)) == \"<type 'dict'>\" True >>> len(midpoints.keys()) == 63", "map(int, index) middle_points[label_idx] = (tuple(point), tuple(index)) # Below there is some debugging code.", "== type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> os.remove(input_filename) Now we will", "imags. The term 'middle midpoints' is used on purpose. 
You might think that", "== type(1) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True", "the available labels returned by itk # as sometimes strange things happen and", "label image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the details of the", "try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now iterate over all available labels except the", "# The purpose of the filter below is to define the unique labels", "the segmentation # 2) Extract the largest patch of the segmentation as there", "structures expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except: return None n_points", "unit testing. Please also consited this set of unittests as an example how", "unittests as an example how to use this function. >>> import base64 >>>", "filter below is to define the unique labels # given segmentation contains. unique_labels", "as an example how to use this function. >>> import base64 >>> from", "== [1, 2, 3, 10, 11, 12, 13, 20, 21, 22, 23, 30,", "# 3) Apply the distance transform to the largest path with # given", "type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> os.remove(input_filename) Now we will try", "correspond to individual structures. Formally this means that the image has to be", ">>> map(int, midpoints[21][0]) == [14, 24, 0] True >>> midpoints[30] == ((0.0, 39.0,", "sys import itk from possum import pos_itk_core from possum import pos_itk_transforms from possum.pos_common", "= {} # We have to map the available labels returned by itk", "position of the first (index-wise) voxel with the maimum value. This means that", "the distance transform to the largest path with given segmentation 4) Pick the", "time to do some unit testing. Please also consited this set of unittests", "Not really important for everyday # use. 
# print middle_points.__repr__() return middle_points def", ">>> midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1, 2, 3, 10, 11, 12,", "midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1, 2, 3, 10, 11, 12, 13,", "possum.pos_common import r \"\"\" .. note:: Some of the non-cruical, optional functions in", "pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the details of the image provided and check if", "C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now iterate over all available labels except", "= map(int, unique_labels.GetLabels()) # Now we need to remove the background label (if", "The overall idea of this loop is to: 1) Extract given label from", "a 3D image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image =", "details of the image provided and check if they are # ok to", "expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except: return None n_points =", "value of the distance transform, location of the first one is returned. One", "of the distance transform for given segmentation # and by this define the", "label. .. note :: Please have in ming that this procedure returns position", "into a vtkPolyData structure and assigns appropriate label IDs to the individual points", "index = map(int, index) + [0] if n_dim == 3: point = map(float,", "point' of given label. .. note :: Please have in ming that this", "filters # templates. t_label_img = itk_image.__class__ # We'll be also using another image", "== 3: point = map(float, point) index = map(int, index) middle_points[label_idx] = (tuple(point),", "purpose. You might think that we're calculating centroids here, but not. I use", "another image type. 
This one is identical # in terms of size and", "expected to be a labelled image in which individual discrete values correspond to", ">>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints", "This is where we'll collect the results. We collect, both, the physical #", "the structure. Therefore some of the results may look wired but they are", "calculated middle midpoints of labels in the provided image. The midpoints are calculated", "= \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img,", "processed. :type itk_image: `itk.Image` :return: Middle midpoints of the labels in the image.", "some unit testing. Please also consited this set of unittests as an example", "provided image. The midpoints are calculated in the following way: Now iterate over", "processed in a slightly different way for # two dimensional results and slightly", "turn into vtk points :type point_list: {int: ((float, float, float), (float, float, float)),", "this define the 'middle point' of given label. .. note :: Please have", "except: return None n_points = len(points_list.keys()) points = vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array", "of labels in the provided image. The midpoints are calculated in the following", "this module require vtk module to be installed. If it is not available", "that if there is more than one pixels with the maximum value of", "a single component and to have a dimensionality of two or three. 
Images", "'dict'>\" True >>> len(midpoints.keys()) == 63 True >>> str(midpoints.get(0,None)) == \"None\" True >>>", "\"\"\" This function introduces a workflow for calculating the middle midpoints of the", "data type and number of components # of the label image label_type =", "== 63 True >>> str(midpoints.get(0,None)) == \"None\" True >>> midpoints[1] == ((5.0, 0.0,", "pos_itk_transforms from possum.pos_common import r \"\"\" .. note:: Some of the non-cruical, optional", "pos_itk_core from possum import pos_itk_transforms from possum.pos_common import r \"\"\" .. note:: Some", "Extract given label from the segmentation 2) Extract the largest patch of the", "the largest patch of the segmentation as there # might be multiple disjoint", "will try to process a 3D image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename,", "\"Only single component images are allowed.\" assert data_type in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect", "ok to use in the routine. n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type", "be used in filters # templates. t_label_img = itk_image.__class__ # We'll be also", "0] True >>> midpoints[30] == ((0.0, 39.0, 0), (0, 39, 0)) True >>>", "unsigned_short are accepted.\") # t_label_img is the ITK image type class to be", "patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\", "#!/usr/bin/env python # encoding: utf-8 import os import sys import itk from possum", "be also using another image type. 
This one is identical # in terms", "type(midpoints[30][0][1]) == type(1.0) True >>> midpoints[183] == ((15.0, 15.0, 15.0), (15, 15, 15))", "[0] if n_dim == 3: point = map(float, point) index = map(int, index)", "itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert n_dim in [2, 3], \\ \"Incorrect dimensionality.\" assert", "as centroids # are something different and they are calculated in a different", "= itk_image.__class__ # We'll be also using another image type. This one is", "(i, (pt, idx)) in points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point", "term 'middle midpoints' as it is not the centroids what is calculated here.", "given label from the segmentation # 2) Extract the largest patch of the", "= (tuple(point), tuple(index)) # Below there is some debugging code. Not really important", "type(1) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>>", "# Below there is some debugging code. Not really important for everyday #", "20, 21, 22, 23, 30, 31, 32, 33] True >>> map(int, midpoints[1][0]) ==", "type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> midpoints[183] == ((15.0, 15.0, 15.0),", "identical # in terms of size and dimensionality as the labelled image. 
#", "for label_idx in available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx)", "labels returned by itk # as sometimes strange things happen and they are", "True >>> len(midpoints.keys()) == 63 True >>> str(midpoints.get(0,None)) == \"None\" True >>> midpoints[1]", "= vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt, idx)) in", "of the non-cruical, optional functions in this module require vtk module to be", "the unique labels # given segmentation contains. unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image)", "image has to be of `uchar` or `ushort` type, to have a single", "well. float_type = list(label_type) float_type[1] = \"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The", "into 3D midpoints since # it is easier to use them in vtk", "there might be multiple disjoint regions colored with given label 3) Apply the", "dimensionality, data type and number of components # of the label image label_type", "this set of unittests as an example how to use this function. >>>", "module to be installed. If it is not available the VTK support will", "be multiple disjoint regions colored with given label 3) Apply the distance transform", "things happen and they are returned as longints # which are apparently incomparibile", "are calculated in a different # way. Our center midpoints cannot be called", "calculating centroids here, but not. I use the term 'middle midpoints' as it", "and by this define the 'middle point' of given label # I call", "vtk if they're 3D midpoints. if n_dim == 2: point = map(float, point)", "the background label which has been removed. 
The overall idea of this loop", "label # 3) Apply the distance transform to the largest path with #", "consited this set of unittests as an example how to use this function.", "== [14, 24, 0] True >>> midpoints[30] == ((0.0, 39.0, 0), (0, 39,", "largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update()", "images are allowed.\" assert data_type in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data type for", "have a single component and to have a dimensionality of two or three.", "point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point if __name__ == '__main__': import doctest print doctest.testmod(verbose=True)", "the maimum value. This means that if there is more than one pixels", "0.0, 0.0), (5, 0, 0)) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1])", "== \"None\" True >>> midpoints[1] == ((5.0, 0.0, 0.0), (5, 0, 0)) True", "define the 'middle point' of given label. .. note :: Please have in", "index = map(int, index) middle_points[label_idx] = (tuple(point), tuple(index)) # Below there is some", "label_type[1] assert n_dim in [2, 3], \\ \"Incorrect dimensionality.\" assert number_of_components == 1,", "lot of explicit casting assure types # compatibility. 
The 2D midpoints are converted", "distance transform to the largest path with given segmentation 4) Pick the maximum", "be a labelled image in which individual discrete values correspond to individual structures.", "of `uchar` or `ushort` type, to have a single component and to have", "extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch", "== [14, 0, 0] True >>> map(int, midpoints[21][0]) == [14, 24, 0] True", "for given segmentation and by this define the 'middle point' of given label.", "returned. One could think that probably a centre of mass of the max", "some of the results may look wired but they are actually ok. :param", "ming that this procedure returns position of the first (index-wise) voxel with the", "the results may look wired but they are actually ok. :param itk_image: Labelled", "(t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index =", "The term 'middle midpoints' is used on purpose. 
You might think that we're", "for (i, (pt, idx)) in points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i)", ">>> midpoints[1] == ((5.0, 0.0, 0.0), (5, 0, 0)) True >>> type(midpoints[30][0][1]) ==", "also consited this set of unittests as an example how to use this", "\\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the details of the image provided and check", "(float, float, float)), ...} And now it it a time to do some", "largest patch of the segmentation as there # might be multiple disjoint regions", "the term 'middle midpoints' as it is not the centroids what is calculated", "work if the vtk module is not loaded. :param point_list: List of points", "3) Apply the distance transform to the largest path with # given segmentation", "different way for # two dimensional results and slightly different for 3D resuls:", "n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert n_dim in [2,", "not available the VTK support will be disabled. \"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This", "vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except: return None n_points = len(points_list.keys()) points", "id_array.SetNumberOfTuples(n_points) for (i, (pt, idx)) in points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_,", "float)), ...} :return: Midpoints of the individual structures expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData`", "over all available labels except the background label which # has been removed.", "function. 
>>> import base64 >>> from possum import pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\"", "data type: this one has to be float to handle # the distance", "the label image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the details of", "assure types # compatibility. The 2D midpoints are converted into 3D midpoints since", "support will be disabled. \"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This function introduces a workflow", "debugging code. Not really important for everyday # use. # print middle_points.__repr__() return", "itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) # We need", "may look wired but they are actually ok. :param itk_image: Labelled image, the", "unique labels # given segmentation contains. unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff()", "distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index)", "for calculating the middle midpoints of the labelled imags. The term 'middle midpoints'", "= \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0)", "distance transform for given segmentation and by this define the 'middle point' of", "\"\"\" .. 
note:: Some of the non-cruical, optional functions in this module require", "type(1.0) True >>> midpoints[183] == ((15.0, 15.0, 15.0), (15, 15, 15)) True >>>", "data type for a labelled image only unsigned_char\\ and unsigned_short are accepted.\") #", "21, 22, 23, 30, 31, 32, 33] True >>> map(int, midpoints[1][0]) == [14,", "center midpoints cannot be called centroids. for label_idx in available_labels: extract_label = \\", "only unsigned_char\\ and unsigned_short are accepted.\") # t_label_img is the ITK image type", "type(1.0) True >>> os.remove(input_filename) Now we will try to process a 3D image", "image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>>", "float)), ...} And now it it a time to do some unit testing.", "maximum value of the distance transform, location of the first one is returned.", "the ITK image type class to be used in filters # templates. t_label_img", "points_to_vtk_points(points_list): \"\"\" The function converts the location of the middle points into a", "function converts the location of the middle points into a vtkPolyData structure and", "than one pixels with the maximum value of the distance transform, location of", "Now we need to remove the background label (if such # label actually", "loop is to: # 1) Extract given label from the segmentation # 2)", "cannot be called centroids. for label_idx in available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img,", "is the ITK image type class to be used in filters # templates.", "# ok to use in the routine. 
n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel()", "= map(int, index) + [0] if n_dim == 3: point = map(float, point)", "midpoints are calculated in the following way: Now iterate over all available labels", "one is returned. One could think that probably a centre of mass of", "as longints # which are apparently incomparibile with python in type. # Consider", "calculated in the following way: Now iterate over all available labels except the", "a labelled image only unsigned_char\\ and unsigned_short are accepted.\") # t_label_img is the", "multiple disjoint regions colored with given label # 3) Apply the distance transform", "is some debugging code. Not really important for everyday # use. # print", "centroids. for label_idx in available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx)", "as the labelled image. # The differe is in data type: this one", "data_type in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data type for a labelled image only", "# are something different and they are calculated in a different # way.", "midpoints[21][0]) == [14, 24, 0] True >>> midpoints[30] == ((0.0, 39.0, 0), (0,", "first (index-wise) voxel with the maimum value. This means that if there is", "having different properties will not be processed. 
:type itk_image: `itk.Image` :return: Middle midpoints", "actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now iterate over all available", "11, 12, 13, 20, 21, 22, 23, 30, 31, 32, 33] True >>>", "the 'middle point' of given label # I call the midpoints 'middle midpoints'", "type for a labelled image only unsigned_char\\ and unsigned_short are accepted.\") # t_label_img", "the largest patch of the segmentation as there might be multiple disjoint regions", "iterate over all available labels except the background label which has been removed.", "optional functions in this module require vtk module to be installed. If it", "way: Now iterate over all available labels except the background label which has", "Apply the distance transform to the largest path with # given segmentation #", "a vtkPolyData structure and assigns appropriate label IDs to the individual points of", "they are calculated in a different # way. Our center midpoints cannot be", "might be multiple disjoint regions colored with given label # 3) Apply the", "to the largest path with # given segmentation # 4) Pick the maximum", "a time to do some unit testing. Please also consited this set of", "float), (float, float, float)), ...} :return: Midpoints of the individual structures expressed as", "transform well. float_type = list(label_type) float_type[1] = \"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] #", "'middle midpoints' is used on purpose. You might think that we're calculating centroids", "midpoints 'middle midpoints' not centroids as centroids # are something different and they", "the labelled imags. The term 'middle midpoints' is used on purpose. You might", ":rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except: return None n_points = len(points_list.keys()) points =", "in a different # way. Our center midpoints cannot be called centroids. 
for", "the maximum value of the distance transform, location of the first one is", "to be a labelled image in which individual discrete values correspond to individual", "\\ \"Only single component images are allowed.\" assert data_type in [\"unsigned_char\", \"unsigned_short\"], \\", "`uchar` or `ushort` type, to have a single component and to have a", "of the labelled imags. The term 'middle midpoints' is used on purpose. You", "12, 13, 20, 21, 22, 23, 30, 31, 32, 33] True >>> map(int,", "point) + [0] index = map(int, index) + [0] if n_dim == 3:", "open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>>", "\\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of the filter below is to define the", "a centre of mass of the max voxels should be returned, but no.", "the middle_points = {} # We have to map the available labels returned", "is easier to use them in vtk if they're 3D midpoints. if n_dim", ":param point_list: List of points to turn into vtk points :type point_list: {int:", "the largest path with # given segmentation # 4) Pick the maximum of", "= vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt, idx)) in points_list.items(): id_ =", "9.0), (5, 5, 9)) True >>> midpoints[53] == ((13.0, 0.0, 5.0), (13, 0,", ":type itk_image: `itk.Image` :return: Middle midpoints of the labels in the image. 
:rtype:", "label_idx in available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0)", "len(points_list.keys()) points = vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points)", "extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update()", "refine the results returned by itk # The results have to be processed", "Midpoints of the individual structures expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion()", "the VTK support will be disabled. \"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This function introduces", "of components # of the label image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] #", "are allowed.\" assert data_type in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data type for a", "map(int, midpoints[21][0]) == [14, 24, 0] True >>> midpoints[30] == ((0.0, 39.0, 0),", "label IDs to the individual points of the vtk points structure. Basically, you", "with the maximum value of the distance transform, location of the first one", "midpoints cannot be called centroids. for label_idx in available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[", "possum import pos_itk_core from possum import pos_itk_transforms from possum.pos_common import r \"\"\" ..", "define the 'middle point' of given label # I call the midpoints 'middle", "of the first one is returned. 
One could think that probably a centre", "float, float)), ...} And now it it a time to do some unit", "given label # I call the midpoints 'middle midpoints' not centroids as centroids", "distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput())", "type. This one is identical # in terms of size and dimensionality as", "print middle_points.__repr__() return middle_points def points_to_vtk_points(points_list): \"\"\" The function converts the location of", "unsigned_char\\ and unsigned_short are accepted.\") # t_label_img is the ITK image type class", "of the individual structures expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except:", "not the centroids what is calculated here. Anyway, this function calculated middle midpoints", "in the provided image. The midpoints are calculated in the following way: Now", ">>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) ==", "slightly refine the results returned by itk # The results have to be", "results returned by itk # The results have to be processed in a", "the vtk points structure. Basically, you can use the resulting vtkPolyData() and know", "be processed. :type itk_image: `itk.Image` :return: Middle midpoints of the labels in the", "t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of the filter below is to", "also using another image type. 
This one is identical # in terms of", "this one has to be float to handle # the distance transform well.", "note :: Please have in ming that this procedure returns position of the", "itk_image: Labelled image, the image is expected to be a labelled image in", "of a particular structure. ... note :: This function will not work if", "the provided image. The midpoints are calculated in the following way: Now iterate", "points = vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for", "(15, 15, 15)) True >>> midpoints[111] == ((5.0, 5.0, 9.0), (5, 5, 9))", "as there # might be multiple disjoint regions colored with given label #", "for everyday # use. # print middle_points.__repr__() return middle_points def points_to_vtk_points(points_list): \"\"\" The", "vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt, idx)) in points_list.items(): id_ = points.InsertNextPoint(pt)", "# encoding: utf-8 import os import sys import itk from possum import pos_itk_core", "image. The midpoints are calculated in the following way: Now iterate over all", "True \"\"\" C_BACKGROUND_LABEL_IDX = 0 # Define the dimensionality, data type and number", "utf-8 import os import sys import itk from possum import pos_itk_core from possum", "float, float)), ...} :return: Midpoints of the individual structures expressed as vtk.vtkPolyData() :rtype:", "there is more than one pixels with the maximum value of the distance", "id_array.SetTuple1(id_, i) point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point if __name__ ==", "calculated here. 
Anyway, this function calculated middle midpoints of labels in the provided", "path with # given segmentation # 4) Pick the maximum of the distance", "this function calculated middle midpoints of labels in the provided image. The midpoints", "particular structure. ... note :: This function will not work if the vtk", "but no. It is unknown is such centre would be located in the", "different properties will not be processed. :type itk_image: `itk.Image` :return: Middle midpoints of", "labelled image. # The differe is in data type: this one has to", "way for # two dimensional results and slightly different for 3D resuls: #", "list(label_type) float_type[1] = \"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of the", "workflow for calculating the middle midpoints of the labelled imags. The term 'middle", "i) point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point if __name__ == '__main__':", "have to map the available labels returned by itk # as sometimes strange", "sometimes strange things happen and they are returned as longints # which are", ">>> sorted(midpoints.keys()) == [1, 2, 3, 10, 11, 12, 13, 20, 21, 22,", "given segmentation and by this define the 'middle point' of given label. ..", "2: point = map(float, point) + [0] index = map(int, index) + [0]", "2D midpoints are converted into 3D midpoints since # it is easier to", "ITK image type class to be used in filters # templates. t_label_img =", "do some unit testing. Please also consited this set of unittests as an", "It is unknown is such centre would be located in the actual structure", "how to use this function. >>> import base64 >>> from possum import pos_itk_transforms", "image provided and check if they are # ok to use in the", "...} And now it it a time to do some unit testing. 
Please", "disjoint regions colored with given label # 3) Apply the distance transform to", "type class to be used in filters # templates. t_label_img = itk_image.__class__ #", "a workflow for calculating the middle midpoints of the labelled imags. The term", "...} :return: Midpoints of the individual structures expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\"", "One could think that probably a centre of mass of the max voxels", "unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is where we'll collect the results. We collect, both,", "called centroids. for label_idx in available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image)", "returned by itk # The results have to be processed in a slightly", "# might be multiple disjoint regions colored with given label # 3) Apply", "regions colored with given label 3) Apply the distance transform to the largest", "it is not the centroids what is calculated here. Anyway, this function calculated", "== type(1) False >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True", "type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> os.remove(input_filename) Now we", "float), (float, float, float)), ...} And now it it a time to do", "\\ r(\"Incorrect data type for a labelled image only unsigned_char\\ and unsigned_short are", "[1, 2, 3, 10, 11, 12, 13, 20, 21, 22, 23, 30, 31,", "on purpose. You might think that we're calculating centroids here, but not. 
I", "float_type = list(label_type) float_type[1] = \"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose", "number_of_components == 1, \\ \"Only single component images are allowed.\" assert data_type in", "30, 31, 32, 33] True >>> map(int, midpoints[1][0]) == [14, 0, 0] True", "collect, both, the physical # location as well as the middle_points = {}", "labels except the background label which has been removed. The overall idea of", "it it a time to do some unit testing. Please also consited this", "midpoints are converted into 3D midpoints since # it is easier to use", "float, float), (float, float, float)), ...} And now it it a time to", "centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) # We need to slightly", "This one is identical # in terms of size and dimensionality as the", "The purpose of the filter below is to define the unique labels #", "or `ushort` type, to have a single component and to have a dimensionality", "This means that if there is more than one pixels with the maximum", "it a safety precaution available_labels = map(int, unique_labels.GetLabels()) # Now we need to", "calculated in a different # way. Our center midpoints cannot be called centroids.", "patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform =", "here. 
Anyway, this function calculated middle midpoints of labels in the provided image.", "given segmentation 4) Pick the maximum of the distance transform for given segmentation", "itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1, 2, 3,", "in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data type for a labelled image only unsigned_char\\", "middle midpoints of the labelled imags. The term 'middle midpoints' is used on", "centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) # We need to slightly refine the results returned", "= list(label_type) float_type[1] = \"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of", "disabled. \"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This function introduces a workflow for calculating the", "results may look wired but they are actually ok. :param itk_image: Labelled image,", "class to be used in filters # templates. t_label_img = itk_image.__class__ # We'll", "available the VTK support will be disabled. \"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This function", "to the largest path with given segmentation 4) Pick the maximum of the", "distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point =", "with python in type. # Consider it a safety precaution available_labels = map(int,", "None n_points = len(points_list.keys()) points = vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray()", "We'll be also using another image type. 
This one is identical # in", "Extract the largest patch of the segmentation as there might be multiple disjoint", "apparently incomparibile with python in type. # Consider it a safety precaution available_labels", "type(midpoints[30][0][1]) == type(1.0) True >>> os.remove(input_filename) Now we will try to process a", "results have to be processed in a slightly different way for # two", "# Now iterate over all available labels except the background label which #", "to be processed in a slightly different way for # two dimensional results", "introduces a workflow for calculating the middle midpoints of the labelled imags. The", "to individual structures. Formally this means that the image has to be of", "and number of components # of the label image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[", "has to be of `uchar` or `ushort` type, to have a single component", "# 2) Extract the largest patch of the segmentation as there # might", "in available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1)", "this procedure returns position of the first (index-wise) voxel with the maimum value.", "the midpoints 'middle midpoints' not centroids as centroids # are something different and", "str(type(midpoints)) == \"<type 'dict'>\" True >>> len(midpoints.keys()) == 63 True >>> str(midpoints.get(0,None)) ==", "in [2, 3], \\ \"Incorrect dimensionality.\" assert number_of_components == 1, \\ \"Only single", "collect the results. We collect, both, the physical # location as well as", "This function will not work if the vtk module is not loaded. :param", "distance transform, location of the first one is returned. 
One could think that", "((5.0, 5.0, 9.0), (5, 5, 9)) True >>> midpoints[53] == ((13.0, 0.0, 5.0),", "((5.0, 0.0, 0.0), (5, 0, 0)) True >>> type(midpoints[30][0][1]) == type(1) False >>>", "midpoints of labels in the provided image. The midpoints are calculated in the", "ok. :param itk_image: Labelled image, the image is expected to be a labelled", "3) Apply the distance transform to the largest path with given segmentation 4)", "(0, 39, 0)) True >>> type(midpoints[30][1][1]) == type(1) True >>> type(midpoints[30][0][1]) == type(1)", "type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0)", "# This is where we'll collect the results. We collect, both, the physical", "import pos_itk_core from possum import pos_itk_transforms from possum.pos_common import r \"\"\" .. note::", "now it it a time to do some unit testing. Please also consited", "one pixels with the maximum value of the distance transform, location of the", "== type(1.0) True >>> midpoints[183] == ((15.0, 15.0, 15.0), (15, 15, 15)) True", "point_list: {int: ((float, float, float), (float, float, float)), ...} :return: Midpoints of the", "# Consider it a safety precaution available_labels = map(int, unique_labels.GetLabels()) # Now we", "disjoint regions colored with given label 3) Apply the distance transform to the", "`ushort` type, to have a single component and to have a dimensionality of", "The midpoints are calculated in the following way: Now iterate over all available", "converted into 3D midpoints since # it is easier to use them in", "midpoints since # it is easier to use them in vtk if they're", "\\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches = \\", "available labels except the 
background label which has been removed. The overall idea", "points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices)", "one has to be float to handle # the distance transform well. float_type", "= map(float, point) index = map(int, index) middle_points[label_idx] = (tuple(point), tuple(index)) # Below", "to do some unit testing. Please also consited this set of unittests as", "are converted into 3D midpoints since # it is easier to use them", ">>> import base64 >>> from possum import pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>>", "middle_points = {} # We have to map the available labels returned by", "Define the dimensionality, data type and number of components # of the label", "terms of size and dimensionality as the labelled image. # The differe is", "to have a dimensionality of two or three. Images having different properties will", "segmentation and by this define the 'middle point' of given label. .. note", "with # given segmentation # 4) Pick the maximum of the distance transform", "from the segmentation # 2) Extract the largest patch of the segmentation as", "index = centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) # We need to slightly refine the", "the image. :rtype: {int: ((float, float, float), (float, float, float)), ...} And now", "and by this define the 'middle point' of given label. .. note ::", "both, the physical # location as well as the middle_points = {} #", "itk_image.__class__ # We'll be also using another image type. 
This one is identical", "\"unsigned_short\"], \\ r(\"Incorrect data type for a labelled image only unsigned_char\\ and unsigned_short", "of this loop is to: # 1) Extract given label from the segmentation", "for # two dimensional results and slightly different for 3D resuls: # Again,", "\"\"\" try: vtk.vtkVersion() except: return None n_points = len(points_list.keys()) points = vtk.vtkPoints() vertices", "overall idea of this loop is to: 1) Extract given label from the", "such # label actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now iterate", "assert data_type in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data type for a labelled image", "not. I use the term 'middle midpoints' as it is not the centroids", "itk from possum import pos_itk_core from possum import pos_itk_transforms from possum.pos_common import r", "is returned. One could think that probably a centre of mass of the", "define the unique labels # given segmentation contains. unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New()", "of the image provided and check if they are # ok to use", "try to process a 3D image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions))", "is identical # in terms of size and dimensionality as the labelled image.", "the physical # location as well as the middle_points = {} # We", "vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point if", "by itk # The results have to be processed in a slightly different", "points of the vtk points structure. Basically, you can use the resulting vtkPolyData()", "wired but they are actually ok. 
:param itk_image: Labelled image, the image is", "pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of the filter below is to define the unique", "which # has been removed. The overall idea of this loop is to:", "\"None\" True >>> midpoints[1] == ((5.0, 0.0, 0.0), (5, 0, 0)) True >>>", "explicit casting assure types # compatibility. The 2D midpoints are converted into 3D", "to map the available labels returned by itk # as sometimes strange things", "id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt, idx)) in points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_)", "the results. We collect, both, the physical # location as well as the", "have to be processed in a slightly different way for # two dimensional", "term 'middle midpoints' is used on purpose. You might think that we're calculating", "We collect, both, the physical # location as well as the middle_points =", "float to handle # the distance transform well. float_type = list(label_type) float_type[1] =", "incomparibile with python in type. # Consider it a safety precaution available_labels =", "the non-cruical, optional functions in this module require vtk module to be installed.", "removed. The overall idea of this loop is to: 1) Extract given label", "largest path with # given segmentation # 4) Pick the maximum of the", "but they are actually ok. :param itk_image: Labelled image, the image is expected", "provided and check if they are # ok to use in the routine.", "== type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> midpoints[183] == ((15.0, 15.0,", "True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> os.remove(input_filename)", "python # encoding: utf-8 import os import sys import itk from possum import", "they are actually ok. 
:param itk_image: Labelled image, the image is expected to", "calculate_labels_midpoints(itk_image): \"\"\" This function introduces a workflow for calculating the middle midpoints of", "but not. I use the term 'middle midpoints' as it is not the", "midpoints[183] == ((15.0, 15.0, 15.0), (15, 15, 15)) True >>> midpoints[111] == ((5.0,", "the first (index-wise) voxel with the maimum value. This means that if there", "import r \"\"\" .. note:: Some of the non-cruical, optional functions in this", "\\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New()", "do a lot of explicit casting assure types # compatibility. The 2D midpoints", "procedure returns position of the first (index-wise) voxel with the maimum value. This", "is expected to be a labelled image in which individual discrete values correspond", "Formally this means that the image has to be of `uchar` or `ushort`", "label from the segmentation 2) Extract the largest patch of the segmentation as", "= len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert n_dim in [2, 3],", "3, 10, 11, 12, 13, 20, 21, 22, 23, 30, 31, 32, 33]", "id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt, idx)) in points_list.items(): id_", "by this define the 'middle point' of given label # I call the", "they are # ok to use in the routine. 
n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components", "{int: ((float, float, float), (float, float, float)), ...} And now it it a", "0 # Define the dimensionality, data type and number of components # of", "Extract the details of the image provided and check if they are #", "5, 9)) True >>> midpoints[53] == ((13.0, 0.0, 5.0), (13, 0, 5)) True", "The overall idea of this loop is to: # 1) Extract given label", "Now we will try to process a 3D image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\"", "centre would be located in the actual structure or outside the structure. Therefore", "returned, but no. It is unknown is such centre would be located in", "set of unittests as an example how to use this function. >>> import", "dimensionality of two or three. Images having different properties will not be processed.", ">>> str(type(midpoints)) == \"<type 'dict'>\" True >>> len(midpoints.keys()) == 63 True >>> str(midpoints.get(0,None))", "(float, float, float)), ...} :return: Midpoints of the individual structures expressed as vtk.vtkPolyData()", "background label which has been removed. The overall idea of this loop is", ">>> type(midpoints[30][0][1]) == type(1.0) True >>> os.remove(input_filename) Now we will try to process", "to define the unique labels # given segmentation contains. unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img,", "the routine. n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert n_dim", "(pt, idx)) in points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point =", "this define the 'middle point' of given label # I call the midpoints", "is used on purpose. 
You might think that we're calculating centroids here, but", "[14, 24, 0] True >>> midpoints[30] == ((0.0, 39.0, 0), (0, 39, 0))", "[\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data type for a labelled image only unsigned_char\\ and", "be located in the actual structure or outside the structure. Therefore some of", "possum import pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image =", "of two or three. Images having different properties will not be processed. :type", "segmentation contains. unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is", "below is to define the unique labels # given segmentation contains. unique_labels =", "middle_points[label_idx] = (tuple(point), tuple(index)) # Below there is some debugging code. Not really", "the segmentation as there # might be multiple disjoint regions colored with given", "that probably a centre of mass of the max voxels should be returned,", "background label which # has been removed. The overall idea of this loop", "= \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches =", "which individual discrete values correspond to individual structures. 
Formally this means that the", "= 0 # Define the dimensionality, data type and number of components #", "in points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point = vtk.vtkPolyData() point.SetPoints(points)", "in terms of size and dimensionality as the labelled image. # The differe", "used on purpose. You might think that we're calculating centroids here, but not.", "of the max voxels should be returned, but no. It is unknown is", "of mass of the max voxels should be returned, but no. It is", "the image provided and check if they are # ok to use in", "be called centroids. for label_idx in available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New()", "in which individual discrete values correspond to individual structures. Formally this means that", "idea of this loop is to: 1) Extract given label from the segmentation", "Apply the distance transform to the largest path with given segmentation 4) Pick", "from possum import pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image", "module is not loaded. :param point_list: List of points to turn into vtk", "def calculate_labels_midpoints(itk_image): \"\"\" This function introduces a workflow for calculating the middle midpoints", "[0] index = map(int, index) + [0] if n_dim == 3: point =", "be disabled. \"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This function introduces a workflow for calculating", "label # I call the midpoints 'middle midpoints' not centroids as centroids #", "the labels in the image. :rtype: {int: ((float, float, float), (float, float, float)),", "Anyway, this function calculated middle midpoints of labels in the provided image. 
The", "os.remove(input_filename) >>> str(type(midpoints)) == \"<type 'dict'>\" True >>> len(midpoints.keys()) == 63 True >>>", "float, float), (float, float, float)), ...} :return: Midpoints of the individual structures expressed", "encoding: utf-8 import os import sys import itk from possum import pos_itk_core from", "from possum.pos_common import r \"\"\" .. note:: Some of the non-cruical, optional functions", "some debugging code. Not really important for everyday # use. # print middle_points.__repr__()", "actual structure or outside the structure. Therefore some of the results may look", ".. note:: Some of the non-cruical, optional functions in this module require vtk", "type: this one has to be float to handle # the distance transform", "itk_image: `itk.Image` :return: Middle midpoints of the labels in the image. :rtype: {int:", "midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints)) == \"<type 'dict'>\" True >>> len(midpoints.keys())", "midpoints. if n_dim == 2: point = map(float, point) + [0] index =", "import pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename)", "label which # has been removed. 
The overall idea of this loop is", "True >>> midpoints[111] == ((5.0, 5.0, 9.0), (5, 5, 9)) True >>> midpoints[53]", "of the segmentation as there # might be multiple disjoint regions colored with", "15)) True >>> midpoints[111] == ((5.0, 5.0, 9.0), (5, 5, 9)) True >>>", "Pick the maximum of the distance transform for given segmentation # and by", "vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point if __name__ == '__main__': import doctest print", "with given segmentation 4) Pick the maximum of the distance transform for given", "python in type. # Consider it a safety precaution available_labels = map(int, unique_labels.GetLabels())", "have a dimensionality of two or three. Images having different properties will not", "open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) ==", "to: 1) Extract given label from the segmentation 2) Extract the largest patch", "segmentation 2) Extract the largest patch of the segmentation as there might be", "loop is to: 1) Extract given label from the segmentation 2) Extract the", "middle points into a vtkPolyData structure and assigns appropriate label IDs to the", "possum import pos_itk_transforms from possum.pos_common import r \"\"\" .. note:: Some of the", "purpose of the filter below is to define the unique labels # given", "to process a 3D image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>>", "of explicit casting assure types # compatibility. 
The 2D midpoints are converted into", "pixels with the maximum value of the distance transform, location of the first", "this loop is to: # 1) Extract given label from the segmentation #", "unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is where we'll", "distance transform to the largest path with # given segmentation # 4) Pick", "itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints)) == \"<type", "individual discrete values correspond to individual structures. Formally this means that the image", "multiple disjoint regions colored with given label 3) Apply the distance transform to", "and know where is a centre of a particular structure. ... note ::", "to be installed. If it is not available the VTK support will be", ":return: Middle midpoints of the labels in the image. :rtype: {int: ((float, float,", "\\ \"Incorrect dimensionality.\" assert number_of_components == 1, \\ \"Only single component images are", "functions in this module require vtk module to be installed. If it is", "t_label_img = itk_image.__class__ # We'll be also using another image type. This one", "== ((15.0, 15.0, 15.0), (15, 15, 15)) True >>> midpoints[111] == ((5.0, 5.0,", "of the first (index-wise) voxel with the maimum value. This means that if", "available labels returned by itk # as sometimes strange things happen and they", "is unknown is such centre would be located in the actual structure or", "n_points = len(points_list.keys()) points = vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\")", "note:: Some of the non-cruical, optional functions in this module require vtk module", "of the distance transform, location of the first one is returned. 
One could", "= itk_image.TransformIndexToPhysicalPoint(index) # We need to slightly refine the results returned by itk", "'middle midpoints' not centroids as centroids # are something different and they are", "will not be processed. :type itk_image: `itk.Image` :return: Middle midpoints of the labels", "= pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1, 2, 3, 10,", "given label from the segmentation 2) Extract the largest patch of the segmentation", ">>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> midpoints[183] ==", "3D midpoints. if n_dim == 2: point = map(float, point) + [0] index", "True >>> midpoints[183] == ((15.0, 15.0, 15.0), (15, 15, 15)) True >>> midpoints[111]", "centre of mass of the max voxels should be returned, but no. It", "\"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of the filter below is", "t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform", ":param itk_image: Labelled image, the image is expected to be a labelled image", "regions colored with given label # 3) Apply the distance transform to the", "assigns appropriate label IDs to the individual points of the vtk points structure.", "single component images are allowed.\" assert data_type in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data", "slightly different way for # two dimensional results and slightly different for 3D", "largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ 
(t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid", "dimensionality.\" assert number_of_components == 1, \\ \"Only single component images are allowed.\" assert", "= calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints)) == \"<type 'dict'>\" True >>> len(midpoints.keys()) ==", "are accepted.\") # t_label_img is the ITK image type class to be used", "idea of this loop is to: # 1) Extract given label from the", "the distance transform for given segmentation and by this define the 'middle point'", "probably a centre of mass of the max voxels should be returned, but", "The results have to be processed in a slightly different way for #", "# We have to map the available labels returned by itk # as", "Therefore some of the results may look wired but they are actually ok.", "dimensionality as the labelled image. # The differe is in data type: this", "t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is where we'll collect the results. We", "there is some debugging code. Not really important for everyday # use. #", "itk # The results have to be processed in a slightly different way", "((13.0, 0.0, 5.0), (13, 0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX = 0 # Define", "= pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints)) == \"<type 'dict'>\"", "== ((0.0, 39.0, 0), (0, 39, 0)) True >>> type(midpoints[30][1][1]) == type(1) True", "3], \\ \"Incorrect dimensionality.\" assert number_of_components == 1, \\ \"Only single component images", "a particular structure. ... note :: This function will not work if the", "where is a centre of a particular structure. ... 
note :: This function", "middle_points def points_to_vtk_points(points_list): \"\"\" The function converts the location of the middle points", "or outside the structure. Therefore some of the results may look wired but", "maimum value. This means that if there is more than one pixels with", "voxel with the maimum value. This means that if there is more than", "to be float to handle # the distance transform well. float_type = list(label_type)", "# two dimensional results and slightly different for 3D resuls: # Again, we", "the labelled image. # The differe is in data type: this one has", "# Now we need to remove the background label (if such # label", "not centroids as centroids # are something different and they are calculated in", "2) Extract the largest patch of the segmentation as there might be multiple", "Labelled image, the image is expected to be a labelled image in which", "31, 32, 33] True >>> map(int, midpoints[1][0]) == [14, 0, 0] True >>>", "label 3) Apply the distance transform to the largest path with given segmentation", "\\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is where we'll collect the", "accepted.\") # t_label_img is the ITK image type class to be used in", "# label actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now iterate over", "= centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) # We need to slightly refine the results", "use them in vtk if they're 3D midpoints. if n_dim == 2: point", "think that we're calculating centroids here, but not. I use the term 'middle", "and unsigned_short are accepted.\") # t_label_img is the ITK image type class to", "over all available labels except the background label which has been removed. The", "to use them in vtk if they're 3D midpoints. 
if n_dim == 2:", "following way: Now iterate over all available labels except the background label which", "pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1, 2, 3, 10, 11,", "midpoints[1][0]) == [14, 0, 0] True >>> map(int, midpoints[21][0]) == [14, 24, 0]", "point = map(float, point) index = map(int, index) middle_points[label_idx] = (tuple(point), tuple(index)) #", "way. Our center midpoints cannot be called centroids. for label_idx in available_labels: extract_label", "extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches", "n_dim == 3: point = map(float, point) index = map(int, index) middle_points[label_idx] =", "id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt, idx)) in points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1)", "unknown is such centre would be located in the actual structure or outside", "0), (0, 39, 0)) True >>> type(midpoints[30][1][1]) == type(1) True >>> type(midpoints[30][0][1]) ==", "point) index = map(int, index) middle_points[label_idx] = (tuple(point), tuple(index)) # Below there is", "are actually ok. :param itk_image: Labelled image, the image is expected to be", "3D image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename)", "we'll collect the results. We collect, both, the physical # location as well", "are apparently incomparibile with python in type. 
# Consider it a safety precaution", "True >>> type(midpoints[30][1][1]) == type(1) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1])", "`vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except: return None n_points = len(points_list.keys()) points = vtk.vtkPoints()", "a safety precaution available_labels = map(int, unique_labels.GetLabels()) # Now we need to remove", "I use the term 'middle midpoints' as it is not the centroids what", "+ [0] index = map(int, index) + [0] if n_dim == 3: point", ">>> map(int, midpoints[1][0]) == [14, 0, 0] True >>> map(int, midpoints[21][0]) == [14,", "if they are # ok to use in the routine. n_dim = len(itk_image.GetLargestPossibleRegion().GetSize())", "centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) # We need to", "\"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1,", "in the routine. 
n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert", "different for 3D resuls: # Again, we do a lot of explicit casting", ">>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> os.remove(input_filename) Now", "# t_label_img is the ITK image type class to be used in filters", "largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid =", "== type(1.0) True >>> os.remove(input_filename) Now we will try to process a 3D", "assert n_dim in [2, 3], \\ \"Incorrect dimensionality.\" assert number_of_components == 1, \\", "we need to remove the background label (if such # label actually exists)", "given segmentation contains. unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This", "resuls: # Again, we do a lot of explicit casting assure types #", "5)) True \"\"\" C_BACKGROUND_LABEL_IDX = 0 # Define the dimensionality, data type and", "centroids what is calculated here. Anyway, this function calculated middle midpoints of labels", "is calculated here. Anyway, this function calculated middle midpoints of labels in the", "maximum of the distance transform for given segmentation # and by this define", "midpoints[53] == ((13.0, 0.0, 5.0), (13, 0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX = 0", "should be returned, but no. 
It is unknown is such centre would be", "segmentation as there # might be multiple disjoint regions colored with given label", "the middle points into a vtkPolyData structure and assigns appropriate label IDs to", "IDs to the individual points of the vtk points structure. Basically, you can", "segmentation as there might be multiple disjoint regions colored with given label 3)", "= points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return", "are something different and they are calculated in a different # way. Our", "'middle point' of given label. .. note :: Please have in ming that", "the image is expected to be a labelled image in which individual discrete", "is not available the VTK support will be disabled. \"\"\" def calculate_labels_midpoints(itk_image): \"\"\"", "type(1) False >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>>", "23, 30, 31, 32, 33] True >>> map(int, midpoints[1][0]) == [14, 0, 0]", "installed. If it is not available the VTK support will be disabled. \"\"\"", "image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the details of the image", "label (if such # label actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass #", "centroids # are something different and they are calculated in a different #", "We need to slightly refine the results returned by itk # The results", "with the maimum value. This means that if there is more than one", "of the middle points into a vtkPolyData structure and assigns appropriate label IDs", "given segmentation # and by this define the 'middle point' of given label", "# the distance transform well. float_type = list(label_type) float_type[1] = \"float\" t_float_img =", "... 
note :: This function will not work if the vtk module is", "`itk.Image` :return: Middle midpoints of the labels in the image. :rtype: {int: ((float,", "first one is returned. One could think that probably a centre of mass", "this loop is to: 1) Extract given label from the segmentation 2) Extract", "exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now iterate over all available labels", "for given segmentation # and by this define the 'middle point' of given", "properties will not be processed. :type itk_image: `itk.Image` :return: Middle midpoints of the", "not be processed. :type itk_image: `itk.Image` :return: Middle midpoints of the labels in", "that the image has to be of `uchar` or `ushort` type, to have", "2) Extract the largest patch of the segmentation as there # might be", "testing. Please also consited this set of unittests as an example how to", "we will try to process a 3D image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>>", "to handle # the distance transform well. float_type = list(label_type) float_type[1] = \"float\"", "map(float, point) index = map(int, index) middle_points[label_idx] = (tuple(point), tuple(index)) # Below there", "components # of the label image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract", "label from the segmentation # 2) Extract the largest patch of the segmentation", "= vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point if __name__ == '__main__': import doctest", "points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point", "could think that probably a centre of mass of the max voxels should", "use. 
# print middle_points.__repr__() return middle_points def points_to_vtk_points(points_list): \"\"\" The function converts the", "import pos_itk_transforms from possum.pos_common import r \"\"\" .. note:: Some of the non-cruical,", "5.0), (13, 0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX = 0 # Define the dimensionality,", "0.0), (5, 0, 0)) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) ==", "dimensional results and slightly different for 3D resuls: # Again, we do a", "the segmentation as there might be multiple disjoint regions colored with given label", "the individual points of the vtk points structure. Basically, you can use the", "the 'middle point' of given label. .. note :: Please have in ming", "itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index", "results and slightly different for 3D resuls: # Again, we do a lot", "labels # given segmentation contains. 
unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update()", "itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[", "# print middle_points.__repr__() return middle_points def points_to_vtk_points(points_list): \"\"\" The function converts the location", "itk_image.__class__] # Extract the details of the image provided and check if they", "39.0, 0), (0, 39, 0)) True >>> type(midpoints[30][1][1]) == type(1) True >>> type(midpoints[30][0][1])", "be multiple disjoint regions colored with given label # 3) Apply the distance", "# as sometimes strange things happen and they are returned as longints #", "if they're 3D midpoints. if n_dim == 2: point = map(float, point) +", "returns position of the first (index-wise) voxel with the maimum value. This means", "from possum import pos_itk_core from possum import pos_itk_transforms from possum.pos_common import r \"\"\"", "Consider it a safety precaution available_labels = map(int, unique_labels.GetLabels()) # Now we need", "Images having different properties will not be processed. :type itk_image: `itk.Image` :return: Middle", "is to define the unique labels # given segmentation contains. unique_labels = \\", "location of the first one is returned. One could think that probably a", ":: This function will not work if the vtk module is not loaded.", "the distance transform well. 
float_type = list(label_type) float_type[1] = \"float\" t_float_img = \\", ">>> type(midpoints[30][0][1]) == type(1.0) True >>> midpoints[183] == ((15.0, 15.0, 15.0), (15, 15,", "example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints =", "Now iterate over all available labels except the background label which # has", "True >>> midpoints[30] == ((0.0, 39.0, 0), (0, 39, 0)) True >>> type(midpoints[30][1][1])", "there # might be multiple disjoint regions colored with given label # 3)", "transform to the largest path with given segmentation 4) Pick the maximum of", "# has been removed. The overall idea of this loop is to: #", ">>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image)", "3: point = map(float, point) index = map(int, index) middle_points[label_idx] = (tuple(point), tuple(index))", "Our center midpoints cannot be called centroids. for label_idx in available_labels: extract_label =", "= \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the details of the image provided and", "removed. The overall idea of this loop is to: # 1) Extract given", "(5, 5, 9)) True >>> midpoints[53] == ((13.0, 0.0, 5.0), (13, 0, 5))", "iterate over all available labels except the background label which # has been", "mass of the max voxels should be returned, but no. It is unknown", "the centroids what is calculated here. 
Anyway, this function calculated middle midpoints of", ":type point_list: {int: ((float, float, float), (float, float, float)), ...} :return: Midpoints of", "point = map(float, point) + [0] index = map(int, index) + [0] if", "value. This means that if there is more than one pixels with the", "vtkPolyData() and know where is a centre of a particular structure. ... note", "different # way. Our center midpoints cannot be called centroids. for label_idx in", "== \"<type 'dict'>\" True >>> len(midpoints.keys()) == 63 True >>> str(midpoints.get(0,None)) == \"None\"", "in data type: this one has to be float to handle # the", "\"\"\" The function converts the location of the middle points into a vtkPolyData", "into vtk points :type point_list: {int: ((float, float, float), (float, float, float)), ...}", "available_labels = map(int, unique_labels.GetLabels()) # Now we need to remove the background label", "itk # as sometimes strange things happen and they are returned as longints", "the individual structures expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except: return", ">>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename)", "has been removed. The overall idea of this loop is to: # 1)", "contains. unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is where", "know where is a centre of a particular structure. ... 
note :: This", "len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert n_dim in [2, 3], \\", "can use the resulting vtkPolyData() and know where is a centre of a", "function introduces a workflow for calculating the middle midpoints of the labelled imags.", "use in the routine. n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1]", "Extract given label from the segmentation # 2) Extract the largest patch of", "path with given segmentation 4) Pick the maximum of the distance transform for", "False >>> type(midpoints[30][0][1]) == type(1.0) True >>> os.remove(input_filename) Now we will try to", "+ [0] if n_dim == 3: point = map(float, point) index = map(int,", "to be used in filters # templates. t_label_img = itk_image.__class__ # We'll be", "two dimensional results and slightly different for 3D resuls: # Again, we do", "largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn()", "labels except the background label which # has been removed. The overall idea", "in filters # templates. t_label_img = itk_image.__class__ # We'll be also using another", "given label. .. note :: Please have in ming that this procedure returns", "# Again, we do a lot of explicit casting assure types # compatibility.", "we do a lot of explicit casting assure types # compatibility. 
The 2D", "33] True >>> map(int, midpoints[1][0]) == [14, 0, 0] True >>> map(int, midpoints[21][0])", "the distance transform to the largest path with # given segmentation # 4)", "strange things happen and they are returned as longints # which are apparently", "routine. n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert n_dim in", "physical # location as well as the middle_points = {} # We have", "# Extract the details of the image provided and check if they are", "are # ok to use in the routine. n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components =", "(t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update()", "be processed in a slightly different way for # two dimensional results and", "segmentation 4) Pick the maximum of the distance transform for given segmentation and", "need to slightly refine the results returned by itk # The results have", "labels in the image. :rtype: {int: ((float, float, float), (float, float, float)), ...}", "note :: This function will not work if the vtk module is not", "(if such # label actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now", "Basically, you can use the resulting vtkPolyData() and know where is a centre", "# given segmentation contains. 
unique_labels = \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() #", "example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints =", "as well as the middle_points = {} # We have to map the", "of the results may look wired but they are actually ok. :param itk_image:", "map the available labels returned by itk # as sometimes strange things happen", "that we're calculating centroids here, but not. I use the term 'middle midpoints'", "in ming that this procedure returns position of the first (index-wise) voxel with", "idx)) in points_list.items(): id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point = vtk.vtkPolyData()", "appropriate label IDs to the individual points of the vtk points structure. Basically,", "from the segmentation 2) Extract the largest patch of the segmentation as there", "of unittests as an example how to use this function. >>> import base64", "transform to the largest path with # given segmentation # 4) Pick the", "the image has to be of `uchar` or `ushort` type, to have a", "not loaded. :param point_list: List of points to turn into vtk points :type", "they are returned as longints # which are apparently incomparibile with python in", ":: Please have in ming that this procedure returns position of the first", "discrete values correspond to individual structures. Formally this means that the image has", "{int: ((float, float, float), (float, float, float)), ...} :return: Midpoints of the individual", "the following way: Now iterate over all available labels except the background label", "max voxels should be returned, but no. 
It is unknown is such centre", "vtk points structure. Basically, you can use the resulting vtkPolyData() and know where", "labelled image only unsigned_char\\ and unsigned_short are accepted.\") # t_label_img is the ITK", "of size and dimensionality as the labelled image. # The differe is in", "a centre of a particular structure. ... note :: This function will not", "midpoints[30] == ((0.0, 39.0, 0), (0, 39, 0)) True >>> type(midpoints[30][1][1]) == type(1)", "as the middle_points = {} # We have to map the available labels", "try: vtk.vtkVersion() except: return None n_points = len(points_list.keys()) points = vtk.vtkPoints() vertices =", "((float, float, float), (float, float, float)), ...} :return: Midpoints of the individual structures", "middle midpoints of labels in the provided image. The midpoints are calculated in", "remove the background label (if such # label actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX)", "Pick the maximum of the distance transform for given segmentation and by this", "points structure. Basically, you can use the resulting vtkPolyData() and know where is", "the segmentation 2) Extract the largest patch of the segmentation as there might", "\"\"\" C_BACKGROUND_LABEL_IDX = 0 # Define the dimensionality, data type and number of", "False >>> type(midpoints[30][0][1]) == type(1.0) True >>> midpoints[183] == ((15.0, 15.0, 15.0), (15,", "pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>>", "import itk from possum import pos_itk_core from possum import pos_itk_transforms from possum.pos_common import", "single component and to have a dimensionality of two or three. 
Images having", "15.0, 15.0), (15, 15, 15)) True >>> midpoints[111] == ((5.0, 5.0, 9.0), (5,", "4) Pick the maximum of the distance transform for given segmentation and by", "False >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> midpoints[183]", "function calculated middle midpoints of labels in the provided image. The midpoints are", "points into a vtkPolyData structure and assigns appropriate label IDs to the individual", "all available labels except the background label which has been removed. The overall", "extract_label.SetInsideValue(1) extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\", "from possum import pos_itk_transforms from possum.pos_common import r \"\"\" .. note:: Some of", "= label_type[1] assert n_dim in [2, 3], \\ \"Incorrect dimensionality.\" assert number_of_components ==", "distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point", "need to remove the background label (if such # label actually exists) C_BACKGROUND_LABEL_IDX", "point' of given label # I call the midpoints 'middle midpoints' not centroids", "them in vtk if they're 3D midpoints. if n_dim == 2: point =", "values correspond to individual structures. Formally this means that the image has to", "id_ = points.InsertNextPoint(pt) vertices.InsertNextCell(1) vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array)", "has been removed. 
The overall idea of this loop is to: 1) Extract", "== ((13.0, 0.0, 5.0), (13, 0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX = 0 #", "available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now iterate over all available labels except the background", "# The results have to be processed in a slightly different way for", "transform for given segmentation and by this define the 'middle point' of given", "t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New()", "midpoints' not centroids as centroids # are something different and they are calculated", "of the filter below is to define the unique labels # given segmentation", "centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) #", "is to: 1) Extract given label from the segmentation 2) Extract the largest", "this function. >>> import base64 >>> from possum import pos_itk_transforms >>> example_two_dimensions='<KEY> >>>", "largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[", "not work if the vtk module is not loaded. 
:param point_list: List of", "\\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum()", "vtkPolyData structure and assigns appropriate label IDs to the individual points of the", "\"Incorrect dimensionality.\" assert number_of_components == 1, \\ \"Only single component images are allowed.\"", "points to turn into vtk points :type point_list: {int: ((float, float, float), (float,", "converts the location of the middle points into a vtkPolyData structure and assigns", "image type. This one is identical # in terms of size and dimensionality", "5.0, 9.0), (5, 5, 9)) True >>> midpoints[53] == ((13.0, 0.0, 5.0), (13,", "n_dim == 2: point = map(float, point) + [0] index = map(int, index)", "= map(int, index) middle_points[label_idx] = (tuple(point), tuple(index)) # Below there is some debugging", "centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum() point = itk_image.TransformIndexToPhysicalPoint(index) # We need to slightly refine", "is not loaded. 
:param point_list: List of points to turn into vtk points", "allowed.\" assert data_type in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data type for a labelled", "means that the image has to be of `uchar` or `ushort` type, to", "# of the label image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the", ":rtype: {int: ((float, float, float), (float, float, float)), ...} And now it it", ">>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints", "is to: # 1) Extract given label from the segmentation # 2) Extract", "be float to handle # the distance transform well. float_type = list(label_type) float_type[1]", "and they are returned as longints # which are apparently incomparibile with python", ">>> from possum import pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>>", "be installed. If it is not available the VTK support will be disabled.", "easier to use them in vtk if they're 3D midpoints. if n_dim ==", "to slightly refine the results returned by itk # The results have to", "background label (if such # label actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass", "= len(points_list.keys()) points = vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1)", "or three. Images having different properties will not be processed. :type itk_image: `itk.Image`", "except the background label which has been removed. 
The overall idea of this", "# given segmentation # 4) Pick the maximum of the distance transform for", "distance transform well. float_type = list(label_type) float_type[1] = \"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)]", ">>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1, 2,", "given segmentation # 4) Pick the maximum of the distance transform for given", "except: pass # Now iterate over all available labels except the background label", "index) + [0] if n_dim == 3: point = map(float, point) index =", "code. Not really important for everyday # use. # print middle_points.__repr__() return middle_points", "(13, 0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX = 0 # Define the dimensionality, data", "the dimensionality, data type and number of components # of the label image", "0.0, 5.0), (13, 0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX = 0 # Define the", "Please also consited this set of unittests as an example how to use", "pass # Now iterate over all available labels except the background label which", "individual structures expressed as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except: return None", "# way. Our center midpoints cannot be called centroids. for label_idx in available_labels:", "True >>> map(int, midpoints[21][0]) == [14, 24, 0] True >>> midpoints[30] == ((0.0,", "important for everyday # use. # print middle_points.__repr__() return middle_points def points_to_vtk_points(points_list): \"\"\"", ">>> midpoints[53] == ((13.0, 0.0, 5.0), (13, 0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX =", "well as the middle_points = {} # We have to map the available", "patch of the segmentation as there might be multiple disjoint regions colored with", "it a time to do some unit testing. 
Please also consited this set", "3D midpoints since # it is easier to use them in vtk if", "colored with given label 3) Apply the distance transform to the largest path", "True >>> map(int, midpoints[1][0]) == [14, 0, 0] True >>> map(int, midpoints[21][0]) ==", "I call the midpoints 'middle midpoints' not centroids as centroids # are something", "the largest path with given segmentation 4) Pick the maximum of the distance", "calculating the middle midpoints of the labelled imags. The term 'middle midpoints' is", "if the vtk module is not loaded. :param point_list: List of points to", "We have to map the available labels returned by itk # as sometimes", "(tuple(point), tuple(index)) # Below there is some debugging code. Not really important for", "slightly different for 3D resuls: # Again, we do a lot of explicit", "and check if they are # ok to use in the routine. n_dim", "the first one is returned. One could think that probably a centre of", "index) middle_points[label_idx] = (tuple(point), tuple(index)) # Below there is some debugging code. Not", "and assigns appropriate label IDs to the individual points of the vtk points", "vtk points :type point_list: {int: ((float, float, float), (float, float, float)), ...} :return:", ">>> midpoints[183] == ((15.0, 15.0, 15.0), (15, 15, 15)) True >>> midpoints[111] ==", "len(midpoints.keys()) == 63 True >>> str(midpoints.get(0,None)) == \"None\" True >>> midpoints[1] == ((5.0,", "tuple(index)) # Below there is some debugging code. Not really important for everyday", "= itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert n_dim in [2, 3], \\ \"Incorrect dimensionality.\"", "in the actual structure or outside the structure. 
Therefore some of the results", "extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput())", "given label # 3) Apply the distance transform to the largest path with", "# I call the midpoints 'middle midpoints' not centroids as centroids # are", "it is not available the VTK support will be disabled. \"\"\" def calculate_labels_midpoints(itk_image):", "of the labels in the image. :rtype: {int: ((float, float, float), (float, float,", "non-cruical, optional functions in this module require vtk module to be installed. If", "Some of the non-cruical, optional functions in this module require vtk module to", "might think that we're calculating centroids here, but not. I use the term", "by itk # as sometimes strange things happen and they are returned as", "that this procedure returns position of the first (index-wise) voxel with the maimum", "the background label which # has been removed. The overall idea of this", "something different and they are calculated in a different # way. Our center", "# location as well as the middle_points = {} # We have to", "of the label image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__] # Extract the details", "been removed. The overall idea of this loop is to: 1) Extract given", "Again, we do a lot of explicit casting assure types # compatibility. 
The", "input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>>", "type and number of components # of the label image label_type = \\", "as vtk.vtkPolyData() :rtype: `vtk.vtkPolyData` \"\"\" try: vtk.vtkVersion() except: return None n_points = len(points_list.keys())", "available_labels: extract_label = \\ itk.BinaryThresholdImageFilter[ (t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update()", "24, 0] True >>> midpoints[30] == ((0.0, 39.0, 0), (0, 39, 0)) True", "handle # the distance transform well. float_type = list(label_type) float_type[1] = \"float\" t_float_img", "n_dim in [2, 3], \\ \"Incorrect dimensionality.\" assert number_of_components == 1, \\ \"Only", "midpoints' is used on purpose. You might think that we're calculating centroids here,", "and they are calculated in a different # way. Our center midpoints cannot", "to turn into vtk points :type point_list: {int: ((float, float, float), (float, float,", "outside the structure. Therefore some of the results may look wired but they", "centroids here, but not. I use the term 'middle midpoints' as it is", "\"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints))", "True >>> midpoints[53] == ((13.0, 0.0, 5.0), (13, 0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX", "for 3D resuls: # Again, we do a lot of explicit casting assure", "in vtk if they're 3D midpoints. 
if n_dim == 2: point = map(float,", "= \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of the filter below is to define", "# compatibility. The 2D midpoints are converted into 3D midpoints since # it", "function will not work if the vtk module is not loaded. :param point_list:", "middle_points.__repr__() return middle_points def points_to_vtk_points(points_list): \"\"\" The function converts the location of the", "'middle point' of given label # I call the midpoints 'middle midpoints' not", "of given label # I call the midpoints 'middle midpoints' not centroids as", "1) Extract given label from the segmentation # 2) Extract the largest patch", "(t_label_img, t_label_img)].New() extract_label.SetInput(itk_image) extract_label.SetUpperThreshold(label_idx) extract_label.SetLowerThreshold(label_idx) extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img,", "import base64 >>> from possum import pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename,", "\"<type 'dict'>\" True >>> len(midpoints.keys()) == 63 True >>> str(midpoints.get(0,None)) == \"None\" True", "located in the actual structure or outside the structure. Therefore some of the", "patch of the segmentation as there # might be multiple disjoint regions colored", "differe is in data type: this one has to be float to handle", "map(int, index) + [0] if n_dim == 3: point = map(float, point) index", "example how to use this function. >>> import base64 >>> from possum import", "given label 3) Apply the distance transform to the largest path with given", "1, \\ \"Only single component images are allowed.\" assert data_type in [\"unsigned_char\", \"unsigned_short\"],", "is where we'll collect the results. 
We collect, both, the physical # location", "is in data type: this one has to be float to handle #", "22, 23, 30, 31, 32, 33] True >>> map(int, midpoints[1][0]) == [14, 0,", "have in ming that this procedure returns position of the first (index-wise) voxel", "such centre would be located in the actual structure or outside the structure.", "returned by itk # as sometimes strange things happen and they are returned", "with given label # 3) Apply the distance transform to the largest path", "0, 0] True >>> map(int, midpoints[21][0]) == [14, 24, 0] True >>> midpoints[30]", "and slightly different for 3D resuls: # Again, we do a lot of", "if n_dim == 3: point = map(float, point) index = map(int, index) middle_points[label_idx]", "= map(float, point) + [0] index = map(int, index) + [0] if n_dim", "\"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This function introduces a workflow for calculating the middle", "in type. # Consider it a safety precaution available_labels = map(int, unique_labels.GetLabels()) #", "If it is not available the VTK support will be disabled. \"\"\" def", "0, 0)) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1) False", "VTK support will be disabled. \"\"\" def calculate_labels_midpoints(itk_image): \"\"\" This function introduces a", "# Define the dimensionality, data type and number of components # of the", "base64 >>> from possum import pos_itk_transforms >>> example_two_dimensions='<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_two_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_two_dimensions))", "the maximum of the distance transform for given segmentation # and by this", "be returned, but no. 
It is unknown is such centre would be located", "process a 3D image >>> example_three_dimensions=\"<KEY> >>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image", "safety precaution available_labels = map(int, unique_labels.GetLabels()) # Now we need to remove the", "float_type[1] = \"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of the filter", "Below there is some debugging code. Not really important for everyday # use.", "the details of the image provided and check if they are # ok", "component and to have a dimensionality of two or three. Images having different", "vtk module to be installed. If it is not available the VTK support", "extract_label.SetOutsideValue(0) extract_label.SetInsideValue(1) extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch =", "image. # The differe is in data type: this one has to be", "structure. Basically, you can use the resulting vtkPolyData() and know where is a", "templates. t_label_img = itk_image.__class__ # We'll be also using another image type. This", "number of components # of the label image label_type = \\ pos_itk_core.io_image_type_to_component_string_name[ itk_image.__class__]", "63 True >>> str(midpoints.get(0,None)) == \"None\" True >>> midpoints[1] == ((5.0, 0.0, 0.0),", "by this define the 'middle point' of given label. .. note :: Please", "10, 11, 12, 13, 20, 21, 22, 23, 30, 31, 32, 33] True", "assert number_of_components == 1, \\ \"Only single component images are allowed.\" assert data_type", "structures. 
Formally this means that the image has to be of `uchar` or", "overall idea of this loop is to: # 1) Extract given label from", "unique_labels.GetLabels()) # Now we need to remove the background label (if such #", "== 2: point = map(float, point) + [0] index = map(int, index) +", ">>> os.remove(input_filename) Now we will try to process a 3D image >>> example_three_dimensions=\"<KEY>", "'middle midpoints' as it is not the centroids what is calculated here. Anyway,", "check if they are # ok to use in the routine. n_dim =", "((0.0, 39.0, 0), (0, 39, 0)) True >>> type(midpoints[30][1][1]) == type(1) True >>>", "os.remove(input_filename) Now we will try to process a 3D image >>> example_three_dimensions=\"<KEY> >>>", "image is expected to be a labelled image in which individual discrete values", "require vtk module to be installed. If it is not available the VTK", "to the individual points of the vtk points structure. Basically, you can use", ".. note :: Please have in ming that this procedure returns position of", "is such centre would be located in the actual structure or outside the", "individual structures. Formally this means that the image has to be of `uchar`", "use the resulting vtkPolyData() and know where is a centre of a particular", "resulting vtkPolyData() and know where is a centre of a particular structure. ...", "structure. ... 
note :: This function will not work if the vtk module", "15.0), (15, 15, 15)) True >>> midpoints[111] == ((5.0, 5.0, 9.0), (5, 5,", "calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1, 2, 3, 10, 11, 12, 13, 20, 21,", "extract_label.Update() patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New()", "with given label 3) Apply the distance transform to the largest path with", "# 1) Extract given label from the segmentation # 2) Extract the largest", "casting assure types # compatibility. The 2D midpoints are converted into 3D midpoints", "distance transform for given segmentation # and by this define the 'middle point'", "(5, 0, 0)) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1)", "The differe is in data type: this one has to be float to", "map(int, unique_labels.GetLabels()) # Now we need to remove the background label (if such", "of this loop is to: 1) Extract given label from the segmentation 2)", "Extract the largest patch of the segmentation as there # might be multiple", "point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point if __name__ == '__main__': import", "location as well as the middle_points = {} # We have to map", "the max voxels should be returned, but no. It is unknown is such", "of points to turn into vtk points :type point_list: {int: ((float, float, float),", "be of `uchar` or `ushort` type, to have a single component and to", "we're calculating centroids here, but not. I use the term 'middle midpoints' as", "available labels except the background label which # has been removed. The overall", "what is calculated here. 
Anyway, this function calculated middle midpoints of labels in", "transform for given segmentation # and by this define the 'middle point' of", "# We'll be also using another image type. This one is identical #", ">>> type(midpoints[30][1][1]) == type(1) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) ==", "except the background label which # has been removed. The overall idea of", "pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints)) == \"<type 'dict'>\" True", "this means that the image has to be of `uchar` or `ushort` type,", "image type class to be used in filters # templates. t_label_img = itk_image.__class__", "itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100) largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput())", ">>> os.remove(input_filename) >>> str(type(midpoints)) == \"<type 'dict'>\" True >>> len(midpoints.keys()) == 63 True", "= \\ itk.LabelGeometryImageFilter[(t_label_img, t_label_img)].New() unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is where we'll collect", "vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt, idx)) in points_list.items():", "# 4) Pick the maximum of the distance transform for given segmentation #", "they're 3D midpoints. if n_dim == 2: point = map(float, point) + [0]", "module require vtk module to be installed. If it is not available the", "label which has been removed. 
The overall idea of this loop is to:", "import sys import itk from possum import pos_itk_core from possum import pos_itk_transforms from", "itk_image.TransformIndexToPhysicalPoint(index) # We need to slightly refine the results returned by itk #", "largest patch of the segmentation as there might be multiple disjoint regions colored", "a labelled image in which individual discrete values correspond to individual structures. Formally", "0, 5)) True \"\"\" C_BACKGROUND_LABEL_IDX = 0 # Define the dimensionality, data type", "been removed. The overall idea of this loop is to: # 1) Extract", "the location of the middle points into a vtkPolyData structure and assigns appropriate", "Now iterate over all available labels except the background label which has been", "to use in the routine. n_dim = len(itk_image.GetLargestPossibleRegion().GetSize()) number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type =", "all available labels except the background label which # has been removed. The", "[2, 3], \\ \"Incorrect dimensionality.\" assert number_of_components == 1, \\ \"Only single component", "and dimensionality as the labelled image. # The differe is in data type:", "types # compatibility. The 2D midpoints are converted into 3D midpoints since #", "one is identical # in terms of size and dimensionality as the labelled", "using another image type. This one is identical # in terms of size", "two or three. Images having different properties will not be processed. 
:type itk_image:", "vertices = vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i, (pt, idx))", "str(midpoints.get(0,None)) == \"None\" True >>> midpoints[1] == ((5.0, 0.0, 0.0), (5, 0, 0))", "location of the middle points into a vtkPolyData structure and assigns appropriate label", "largest path with given segmentation 4) Pick the maximum of the distance transform", "centroids as centroids # are something different and they are calculated in a", "t_label_img is the ITK image type class to be used in filters #", "has to be float to handle # the distance transform well. float_type =", "True >>> os.remove(input_filename) Now we will try to process a 3D image >>>", "will not work if the vtk module is not loaded. :param point_list: List", "midpoints[111] == ((5.0, 5.0, 9.0), (5, 5, 9)) True >>> midpoints[53] == ((13.0,", "True >>> str(midpoints.get(0,None)) == \"None\" True >>> midpoints[1] == ((5.0, 0.0, 0.0), (5,", "as sometimes strange things happen and they are returned as longints # which", "use the term 'middle midpoints' as it is not the centroids what is", "# templates. t_label_img = itk_image.__class__ # We'll be also using another image type.", "which are apparently incomparibile with python in type. # Consider it a safety", "# use. 
# print middle_points.__repr__() return middle_points def points_to_vtk_points(points_list): \"\"\" The function converts", "\\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1)", "type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1.0) True >>> midpoints[183] == ((15.0,", "label actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except: pass # Now iterate over all", "more than one pixels with the maximum value of the distance transform, location", "largest_patch.Update() distance_transform = \\ itk.SignedMaurerDistanceMapImageFilter[ (t_label_img, t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New()", "happen and they are returned as longints # which are apparently incomparibile with", "itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput()) largest_patch.SetBackgroundValue(0) largest_patch.SetNumberOfObjects(1) largest_patch.SetAttribute(100)", "[14, 0, 0] True >>> map(int, midpoints[21][0]) == [14, 24, 0] True >>>", "input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>>", "9)) True >>> midpoints[53] == ((13.0, 0.0, 5.0), (13, 0, 5)) True \"\"\"", "# We need 
to slightly refine the results returned by itk # The", "the actual structure or outside the structure. Therefore some of the results may", "of the segmentation as there might be multiple disjoint regions colored with given", "midpoints' as it is not the centroids what is calculated here. Anyway, this", "the results returned by itk # The results have to be processed in", "as it is not the centroids what is calculated here. Anyway, this function", "precaution available_labels = map(int, unique_labels.GetLabels()) # Now we need to remove the background", "r(\"Incorrect data type for a labelled image only unsigned_char\\ and unsigned_short are accepted.\")", "individual points of the vtk points structure. Basically, you can use the resulting", "data_type = label_type[1] assert n_dim in [2, 3], \\ \"Incorrect dimensionality.\" assert number_of_components", "vtk module is not loaded. :param point_list: List of points to turn into", "if there is more than one pixels with the maximum value of the", ">>> input_filename=\"/tmp/pos_itk_centroids_example_three_dimensions.nii.gz\" >>> open(input_filename, \"w\").write(base64.decodestring(example_three_dimensions)) >>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image)", "midpoints of the labelled imags. The term 'middle midpoints' is used on purpose.", "image in which individual discrete values correspond to individual structures. Formally this means", "the distance transform for given segmentation # and by this define the 'middle", "unique_labels.Update() # This is where we'll collect the results. We collect, both, the", "point = itk_image.TransformIndexToPhysicalPoint(index) # We need to slightly refine the results returned by", "= vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array = vtk.vtkUnsignedCharArray() id_array.SetName(\"Label_ID\") id_array.SetNumberOfComponents(1) id_array.SetNumberOfTuples(n_points) for (i,", "no. 
It is unknown is such centre would be located in the actual", "the background label (if such # label actually exists) C_BACKGROUND_LABEL_IDX try: available_labels.remove(C_BACKGROUND_LABEL_IDX) except:", ">>> midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints)) == \"<type 'dict'>\" True >>>", "# The differe is in data type: this one has to be float", "labels in the provided image. The midpoints are calculated in the following way:", "are calculated in the following way: Now iterate over all available labels except", "C_BACKGROUND_LABEL_IDX = 0 # Define the dimensionality, data type and number of components", "to remove the background label (if such # label actually exists) C_BACKGROUND_LABEL_IDX try:", "== ((5.0, 0.0, 0.0), (5, 0, 0)) True >>> type(midpoints[30][0][1]) == type(1) False", "where we'll collect the results. We collect, both, the physical # location as", "def points_to_vtk_points(points_list): \"\"\" The function converts the location of the middle points into", "3D resuls: # Again, we do a lot of explicit casting assure types", "The function converts the location of the middle points into a vtkPolyData structure", "vertices.InsertCellPoint(id_) id_array.SetTuple1(id_, i) point = vtk.vtkPolyData() point.SetPoints(points) point.SetVerts(vertices) point.GetPointData().AddArray(id_array) return point if __name__", "look wired but they are actually ok. :param itk_image: Labelled image, the image", "think that probably a centre of mass of the max voxels should be", "to use this function. >>> import base64 >>> from possum import pos_itk_transforms >>>", "if n_dim == 2: point = map(float, point) + [0] index = map(int,", "image, the image is expected to be a labelled image in which individual", "This function introduces a workflow for calculating the middle midpoints of the labelled", "transform, location of the first one is returned. 
One could think that probably", "4) Pick the maximum of the distance transform for given segmentation # and", "the filter below is to define the unique labels # given segmentation contains.", "r \"\"\" .. note:: Some of the non-cruical, optional functions in this module", "True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1])", "(index-wise) voxel with the maimum value. This means that if there is more", "= \"float\" t_float_img = \\ pos_itk_core.io_component_string_name_to_image_type[tuple(float_type)] # The purpose of the filter below", "sorted(midpoints.keys()) == [1, 2, 3, 10, 11, 12, 13, 20, 21, 22, 23,", "of given label. .. note :: Please have in ming that this procedure", "= calculate_labels_midpoints(itk_image) >>> sorted(midpoints.keys()) == [1, 2, 3, 10, 11, 12, 13, 20,", "return middle_points def points_to_vtk_points(points_list): \"\"\" The function converts the location of the middle", "patches = \\ itk.ConnectedComponentImageFilter[ (t_label_img, t_label_img)].New() patches.SetInput(extract_label.GetOutput()) patches.Update() largest_patch = \\ itk.LabelShapeKeepNObjectsImageFilter[t_label_img].New() largest_patch.SetInput(patches.GetOutput())", "means that if there is more than one pixels with the maximum value", "call the midpoints 'middle midpoints' not centroids as centroids # are something different", "0)) True >>> type(midpoints[30][0][1]) == type(1) False >>> type(midpoints[30][0][1]) == type(1) False >>>", "to have a single component and to have a dimensionality of two or", "in the image. :rtype: {int: ((float, float, float), (float, float, float)), ...} And", "2, 3, 10, 11, 12, 13, 20, 21, 22, 23, 30, 31, 32,", "to: # 1) Extract given label from the segmentation # 2) Extract the", ">>> itk_image = pos_itk_transforms.read_itk_image(input_filename) >>> midpoints = calculate_labels_midpoints(itk_image) >>> os.remove(input_filename) >>> str(type(midpoints)) ==", "type. 
# Consider it a safety precaution available_labels = map(int, unique_labels.GetLabels()) # Now", "0] True >>> map(int, midpoints[21][0]) == [14, 24, 0] True >>> midpoints[30] ==", "True >>> midpoints[1] == ((5.0, 0.0, 0.0), (5, 0, 0)) True >>> type(midpoints[30][0][1])", "compatibility. The 2D midpoints are converted into 3D midpoints since # it is", "here, but not. I use the term 'middle midpoints' as it is not", "labelled imags. The term 'middle midpoints' is used on purpose. You might think", "And now it it a time to do some unit testing. Please also", "in the following way: Now iterate over all available labels except the background", "component images are allowed.\" assert data_type in [\"unsigned_char\", \"unsigned_short\"], \\ r(\"Incorrect data type", "maximum of the distance transform for given segmentation and by this define the", "is a centre of a particular structure. ... note :: This function will", "# in terms of size and dimensionality as the labelled image. # The", "a dimensionality of two or three. Images having different properties will not be", "map(float, point) + [0] index = map(int, index) + [0] if n_dim ==", "== ((5.0, 5.0, 9.0), (5, 5, 9)) True >>> midpoints[53] == ((13.0, 0.0,", ">>> midpoints[111] == ((5.0, 5.0, 9.0), (5, 5, 9)) True >>> midpoints[53] ==", "# it is easier to use them in vtk if they're 3D midpoints.", "segmentation # and by this define the 'middle point' of given label #", "the resulting vtkPolyData() and know where is a centre of a particular structure.", "returned as longints # which are apparently incomparibile with python in type. #", "return None n_points = len(points_list.keys()) points = vtk.vtkPoints() vertices = vtk.vtkCellArray() id_array =", "15, 15)) True >>> midpoints[111] == ((5.0, 5.0, 9.0), (5, 5, 9)) True", "unique_labels.SetInput(itk_image) unique_labels.CalculatePixelIndicesOff() unique_labels.Update() # This is where we'll collect the results. 
We collect,", "the maximum of the distance transform for given segmentation and by this define", "type, to have a single component and to have a dimensionality of two", "you can use the resulting vtkPolyData() and know where is a centre of", "t_float_img)].New() distance_transform.SetInput(largest_patch.GetOutput()) distance_transform.InsideIsPositiveOn() distance_transform.Update() centroid = itk.MinimumMaximumImageCalculator[t_float_img].New() centroid.SetImage(distance_transform.GetOutput()) centroid.Compute() centroid.GetIndexOfMaximum() index = centroid.GetIndexOfMaximum()", "of the distance transform for given segmentation and by this define the 'middle", "number_of_components = itk_image.GetNumberOfComponentsPerPixel() data_type = label_type[1] assert n_dim in [2, 3], \\ \"Incorrect", "use this function. >>> import base64 >>> from possum import pos_itk_transforms >>> example_two_dimensions='<KEY>", "of the vtk points structure. Basically, you can use the resulting vtkPolyData() and", "longints # which are apparently incomparibile with python in type. # Consider it", "image. :rtype: {int: ((float, float, float), (float, float, float)), ...} And now it", "would be located in the actual structure or outside the structure. Therefore some", "structure and assigns appropriate label IDs to the individual points of the vtk", "{} # We have to map the available labels returned by itk #", "# and by this define the 'middle point' of given label # I", "Please have in ming that this procedure returns position of the first (index-wise)", "Middle midpoints of the labels in the image. :rtype: {int: ((float, float, float),", "as there might be multiple disjoint regions colored with given label 3) Apply", "different and they are calculated in a different # way. Our center midpoints", ">>> str(midpoints.get(0,None)) == \"None\" True >>> midpoints[1] == ((5.0, 0.0, 0.0), (5, 0," ]
[ "import TransformerMixin class activeTrans(TransformerMixin): def __init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in =", "def fit(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData", "= self.New_Trans if self.ifSData == True: trans = X.transformer trans.fit(X) self.trans = trans", "= Reset_default self.ifSData = ifSData def fit(self, X, y=None): if self.New_Trans != None:", "fit_transform(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData ==", "elif self.ifSData == False: self.trans = [] for i in X.data: self.trans.append(i.transformer.fit(i)) def", "self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if", "= self.trans[0].transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self, X,", "Reset_default=False): self.Trans_in = Trans_in self.Multi_col = Multi_col self.New_Trans = New_Trans self.Reset_default = Reset_default", "X.data: self.trans.append(i.transformer) features = self.trans[0].fit_transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].fit_transform(X.data[i])) return features", "trans.fit(X) self.trans = trans elif self.ifSData == False: self.trans = [] for i", "if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: return self.trans.transform(X)", "class activeTrans(TransformerMixin): def __init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in = Trans_in self.Multi_col", "= [] for i in X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None): if self.New_Trans", "transform(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData ==", "elif self.ifSData == False: features = 
self.trans[0].transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i]))", "self.Reset_default = Reset_default self.ifSData = ifSData def fit(self, X, y=None): if self.New_Trans !=", "from sklearn.base import TransformerMixin class activeTrans(TransformerMixin): def __init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False):", "return self.trans.fit_transform(X) elif self.ifSData == False: self.trans = [] for i in X.data:", "self.ifSData == False: features = self.trans[0].transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return", "self.trans = X.transformer return self.trans.fit_transform(X) elif self.ifSData == False: self.trans = [] for", "X.transformer return self.trans.fit_transform(X) elif self.ifSData == False: self.trans = [] for i in", "TransformerMixin class activeTrans(TransformerMixin): def __init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in = Trans_in", "Trans_in self.Multi_col = Multi_col self.New_Trans = New_Trans self.Reset_default = Reset_default self.ifSData = ifSData", "sklearn.base import TransformerMixin class activeTrans(TransformerMixin): def __init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in", "activeTrans(TransformerMixin): def __init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in = Trans_in self.Multi_col =", "== False: self.trans = [] for i in X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X,", "self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: trans = X.transformer", "if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: self.trans =", "__init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): 
self.Trans_in = Trans_in self.Multi_col = Multi_col self.New_Trans", "True: return self.trans.transform(X) elif self.ifSData == False: features = self.trans[0].transform(X.data[0]) for i in", "ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in = Trans_in self.Multi_col = Multi_col self.New_Trans =", "return self.trans.transform(X) elif self.ifSData == False: features = self.trans[0].transform(X.data[0]) for i in range(2,", "i in X.data: self.trans.append(i.transformer) features = self.trans[0].fit_transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].fit_transform(X.data[i]))", "features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self, X, y=None): if self.New_Trans != None: X.transformer =", "== True: trans = X.transformer trans.fit(X) self.trans = trans elif self.ifSData == False:", "Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in = Trans_in self.Multi_col = Multi_col self.New_Trans = New_Trans self.Reset_default", "self.ifSData == False: self.trans = [] for i in X.data: self.trans.append(i.transformer) features =", "if self.ifSData == True: trans = X.transformer trans.fit(X) self.trans = trans elif self.ifSData", "= Trans_in self.Multi_col = Multi_col self.New_Trans = New_Trans self.Reset_default = Reset_default self.ifSData =", "trans elif self.ifSData == False: self.trans = [] for i in X.data: self.trans.append(i.transformer.fit(i))", "== True: return self.trans.transform(X) elif self.ifSData == False: features = self.trans[0].transform(X.data[0]) for i", "self.Trans_in = Trans_in self.Multi_col = Multi_col self.New_Trans = New_Trans self.Reset_default = Reset_default self.ifSData", "self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: return self.trans.transform(X) elif", "def __init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in = Trans_in 
self.Multi_col = Multi_col", "self.ifSData == False: self.trans = [] for i in X.data: self.trans.append(i.transformer.fit(i)) def transform(self,", "y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: return", "if self.ifSData == True: self.trans = X.transformer return self.trans.fit_transform(X) elif self.ifSData == False:", "= self.New_Trans if self.ifSData == True: return self.trans.transform(X) elif self.ifSData == False: features", "False: self.trans = [] for i in X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None):", "fit(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData ==", "= self.New_Trans if self.ifSData == True: self.trans = X.transformer return self.trans.fit_transform(X) elif self.ifSData", "= Multi_col self.New_Trans = New_Trans self.Reset_default = Reset_default self.ifSData = ifSData def fit(self,", "= New_Trans self.Reset_default = Reset_default self.ifSData = ifSData def fit(self, X, y=None): if", "Trans_in=True, Multi_col=False, New_Trans=None, Reset_default=False): self.Trans_in = Trans_in self.Multi_col = Multi_col self.New_Trans = New_Trans", "in X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None): if self.New_Trans != None: X.transformer =", "return features def fit_transform(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans", "self.ifSData == True: self.trans = X.transformer return self.trans.fit_transform(X) elif self.ifSData == False: self.trans", "for i in X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None): if self.New_Trans != None:", "for i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self, X, y=None): if", "True: trans = X.transformer trans.fit(X) self.trans = trans elif self.ifSData == False: self.trans", "= ifSData def fit(self, X, y=None): if self.New_Trans != None: X.transformer = 
self.New_Trans", "!= None: X.transformer = self.New_Trans if self.ifSData == True: trans = X.transformer trans.fit(X)", "New_Trans self.Reset_default = Reset_default self.ifSData = ifSData def fit(self, X, y=None): if self.New_Trans", "len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self, X, y=None): if self.New_Trans != None: X.transformer", "self.trans = [] for i in X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None): if", "trans = X.transformer trans.fit(X) self.trans = trans elif self.ifSData == False: self.trans =", "elif self.ifSData == False: self.trans = [] for i in X.data: self.trans.append(i.transformer) features", "self.ifSData = ifSData def fit(self, X, y=None): if self.New_Trans != None: X.transformer =", "self.ifSData == True: return self.trans.transform(X) elif self.ifSData == False: features = self.trans[0].transform(X.data[0]) for", "= X.transformer trans.fit(X) self.trans = trans elif self.ifSData == False: self.trans = []", "None: X.transformer = self.New_Trans if self.ifSData == True: self.trans = X.transformer return self.trans.fit_transform(X)", "in X.data: self.trans.append(i.transformer) features = self.trans[0].fit_transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].fit_transform(X.data[i])) return", "features = self.trans[0].transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self,", "range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self, X, y=None): if self.New_Trans != None:", "self.trans = trans elif self.ifSData == False: self.trans = [] for i in", "self.trans.transform(X) elif self.ifSData == False: features = self.trans[0].transform(X.data[0]) for i in range(2, len(X.data)):", "def transform(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData", 
"None: X.transformer = self.New_Trans if self.ifSData == True: return self.trans.transform(X) elif self.ifSData ==", "Reset_default self.ifSData = ifSData def fit(self, X, y=None): if self.New_Trans != None: X.transformer", "[] for i in X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None): if self.New_Trans !=", "!= None: X.transformer = self.New_Trans if self.ifSData == True: return self.trans.transform(X) elif self.ifSData", "for i in X.data: self.trans.append(i.transformer) features = self.trans[0].fit_transform(X.data[0]) for i in range(2, len(X.data)):", "<filename>MLSD/Transformers/activeTrans.py from sklearn.base import TransformerMixin class activeTrans(TransformerMixin): def __init__(self, ifSData=False, Trans_in=True, Multi_col=False, New_Trans=None,", "self.New_Trans if self.ifSData == True: self.trans = X.transformer return self.trans.fit_transform(X) elif self.ifSData ==", "self.trans = [] for i in X.data: self.trans.append(i.transformer) features = self.trans[0].fit_transform(X.data[0]) for i", "X.transformer = self.New_Trans if self.ifSData == True: return self.trans.transform(X) elif self.ifSData == False:", "if self.ifSData == True: return self.trans.transform(X) elif self.ifSData == False: features = self.trans[0].transform(X.data[0])", "if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: trans =", "== False: self.trans = [] for i in X.data: self.trans.append(i.transformer) features = self.trans[0].fit_transform(X.data[0])", "i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self, X, y=None): if self.New_Trans", "X.transformer = self.New_Trans if self.ifSData == True: self.trans = X.transformer return self.trans.fit_transform(X) elif", "def fit_transform(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData", "= [] for i in X.data: self.trans.append(i.transformer) features = 
self.trans[0].fit_transform(X.data[0]) for i in", "New_Trans=None, Reset_default=False): self.Trans_in = Trans_in self.Multi_col = Multi_col self.New_Trans = New_Trans self.Reset_default =", "self.ifSData == True: trans = X.transformer trans.fit(X) self.trans = trans elif self.ifSData ==", "X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans", "self.Multi_col = Multi_col self.New_Trans = New_Trans self.Reset_default = Reset_default self.ifSData = ifSData def", "True: self.trans = X.transformer return self.trans.fit_transform(X) elif self.ifSData == False: self.trans = []", "== True: self.trans = X.transformer return self.trans.fit_transform(X) elif self.ifSData == False: self.trans =", "y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: trans", "X.transformer = self.New_Trans if self.ifSData == True: trans = X.transformer trans.fit(X) self.trans =", "False: self.trans = [] for i in X.data: self.trans.append(i.transformer) features = self.trans[0].fit_transform(X.data[0]) for", "self.trans[0].transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self, X, y=None):", "!= None: X.transformer = self.New_Trans if self.ifSData == True: self.trans = X.transformer return", "features def fit_transform(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if", "None: X.transformer = self.New_Trans if self.ifSData == True: trans = X.transformer trans.fit(X) self.trans", "= X.transformer return self.trans.fit_transform(X) elif self.ifSData == False: self.trans = [] for i", "self.New_Trans = New_Trans self.Reset_default = Reset_default self.ifSData = ifSData def fit(self, X, y=None):", "self.New_Trans if self.ifSData == True: trans = X.transformer trans.fit(X) self.trans = trans elif", "X, y=None): if self.New_Trans != None: X.transformer = 
self.New_Trans if self.ifSData == True:", "in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def fit_transform(self, X, y=None): if self.New_Trans !=", "= trans elif self.ifSData == False: self.trans = [] for i in X.data:", "[] for i in X.data: self.trans.append(i.transformer) features = self.trans[0].fit_transform(X.data[0]) for i in range(2,", "self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: self.trans = X.transformer", "self.New_Trans if self.ifSData == True: return self.trans.transform(X) elif self.ifSData == False: features =", "y=None): if self.New_Trans != None: X.transformer = self.New_Trans if self.ifSData == True: self.trans", "i in X.data: self.trans.append(i.transformer.fit(i)) def transform(self, X, y=None): if self.New_Trans != None: X.transformer", "X.transformer trans.fit(X) self.trans = trans elif self.ifSData == False: self.trans = [] for", "self.trans.fit_transform(X) elif self.ifSData == False: self.trans = [] for i in X.data: self.trans.append(i.transformer)", "Multi_col self.New_Trans = New_Trans self.Reset_default = Reset_default self.ifSData = ifSData def fit(self, X,", "== False: features = self.trans[0].transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features", "ifSData def fit(self, X, y=None): if self.New_Trans != None: X.transformer = self.New_Trans if", "False: features = self.trans[0].transform(X.data[0]) for i in range(2, len(X.data)): features.join(self.trans[i].transform(X.data[i])) return features def" ]
[ "wraps # third party packages from flask import Flask, jsonify, abort, request, Response", "realm=\"Login Required\"'}) def requires_auth(f): @wraps(f) def decorated(*args, **kwargs): auth = request.authorization if not", "is None): abort() user = User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}), 201", "email def __repr__(self): return '<User {}>'.format(self.username) def to_dict(self): return { 'id': self.id, 'username':", "Response( 'Could not verify your access level for that URL.\\n' 'You have to", "that URL.\\n' 'You have to login with proper credentials', 401, {'WWW-Authenticate': 'Basic realm=\"Login", "username == 'admin' and password == '<PASSWORD>' def authenticate(): return Response( 'Could not", "elif request.method == 'GET': users = User.query.all() users_dto = [user.to_dict() for user in", "\"Method not supported\") @app.errorhandler(405) def custom405(error): response = jsonify({'message': error.description}) return response, 405", "\"\"\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(120), unique=True)", "\"\"\" Represent a user in database \"\"\" id = db.Column(db.Integer, primary_key=True) username =", "response = jsonify({'message': error.description}) return response, 405 if __name__ == '__main__': if app.debug:", "'You have to login with proper credentials', 401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'}) def", "methods=['GET', 'POST']) def get_users(): if request.method == 'POST': if (request.json['username'] is None or", "user in database \"\"\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email", "'POST': if (request.json['username'] is None or request.json['email'] is None): abort() user = User(request.json['username'],", "# endregion @app.route('/') @requires_auth def hello_world(): return 'Hello World!' 
@app.route('/users', methods=['GET', 'POST']) def", "your access level for that URL.\\n' 'You have to login with proper credentials',", "not verify your access level for that URL.\\n' 'You have to login with", "return 'Hello World!' @app.route('/users', methods=['GET', 'POST']) def get_users(): if request.method == 'POST': if", "= True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key = '<KEY>' db = SQLAlchemy(app) # region", "SQLAlchemy(app) # region dbClasses class User(db.Model): \"\"\" Represent a user in database \"\"\"", "unique=True) def __init__(self, username, email): self.username = username self.email = email def __repr__(self):", "authenticate(): return Response( 'Could not verify your access level for that URL.\\n' 'You", "'<User {}>'.format(self.username) def to_dict(self): return { 'id': self.id, 'username': self.username, 'email': self.email }", "Required\"'}) def requires_auth(f): @wraps(f) def decorated(*args, **kwargs): auth = request.authorization if not auth", "f(*args, **kwargs) return decorated # endregion @app.route('/') @requires_auth def hello_world(): return 'Hello World!'", "= '<KEY>' db = SQLAlchemy(app) # region dbClasses class User(db.Model): \"\"\" Represent a", "{ 'id': self.id, 'username': self.username, 'email': self.email } # endregion # region authorization", "'POST']) def get_users(): if request.method == 'POST': if (request.json['username'] is None or request.json['email']", "user.to_dict()}), 201 elif request.method == 'GET': users = User.query.all() users_dto = [user.to_dict() for", "decorated # endregion @app.route('/') @requires_auth def hello_world(): return 'Hello World!' 
@app.route('/users', methods=['GET', 'POST'])", "database \"\"\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(120),", "'Could not verify your access level for that URL.\\n' 'You have to login", "= request.authorization if not auth or not check_auth(auth.username, auth.password): return authenticate() return f(*args,", "auth = request.authorization if not auth or not check_auth(auth.username, auth.password): return authenticate() return", "**kwargs): auth = request.authorization if not auth or not check_auth(auth.username, auth.password): return authenticate()", "Represent a user in database \"\"\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80),", "class User(db.Model): \"\"\" Represent a user in database \"\"\" id = db.Column(db.Integer, primary_key=True)", "@requires_auth def hello_world(): return 'Hello World!' @app.route('/users', methods=['GET', 'POST']) def get_users(): if request.method", "db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}), 201 elif request.method == 'GET': users = User.query.all()", "= 'sqlite:///test.db' app.secret_key = '<KEY>' db = SQLAlchemy(app) # region dbClasses class User(db.Model):", "= jsonify({'message': error.description}) return response, 405 if __name__ == '__main__': if app.debug: app.run()", "== '<PASSWORD>' def authenticate(): return Response( 'Could not verify your access level for", "**kwargs) return decorated # endregion @app.route('/') @requires_auth def hello_world(): return 'Hello World!' 
@app.route('/users',", "return authenticate() return f(*args, **kwargs) return decorated # endregion @app.route('/') @requires_auth def hello_world():", "for that URL.\\n' 'You have to login with proper credentials', 401, {'WWW-Authenticate': 'Basic", "= [user.to_dict() for user in users] return jsonify({'users': users_dto}), 200 else: abort(405, \"Method", "email): self.username = username self.email = email def __repr__(self): return '<User {}>'.format(self.username) def", "abort, request, Response from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) app.debug = True", "jsonify({'message': error.description}) return response, 405 if __name__ == '__main__': if app.debug: app.run() else:", "flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key", "email = db.Column(db.String(120), unique=True) def __init__(self, username, email): self.username = username self.email =", "= Flask(__name__) app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key = '<KEY>' db =", "auth or not check_auth(auth.username, auth.password): return authenticate() return f(*args, **kwargs) return decorated #", "201 elif request.method == 'GET': users = User.query.all() users_dto = [user.to_dict() for user", "import wraps # third party packages from flask import Flask, jsonify, abort, request,", "{'WWW-Authenticate': 'Basic realm=\"Login Required\"'}) def requires_auth(f): @wraps(f) def decorated(*args, **kwargs): auth = request.authorization", "user in users] return jsonify({'users': users_dto}), 200 else: abort(405, \"Method not supported\") @app.errorhandler(405)", "# region authorization def check_auth(username, password): return username == 'admin' and password ==", "username = db.Column(db.String(80), unique=True) email = db.Column(db.String(120), unique=True) def __init__(self, username, email): self.username", "username, email): self.username 
= username self.email = email def __repr__(self): return '<User {}>'.format(self.username)", "def authenticate(): return Response( 'Could not verify your access level for that URL.\\n'", "if not auth or not check_auth(auth.username, auth.password): return authenticate() return f(*args, **kwargs) return", "third party packages from flask import Flask, jsonify, abort, request, Response from flask.ext.sqlalchemy", "@app.route('/') @requires_auth def hello_world(): return 'Hello World!' @app.route('/users', methods=['GET', 'POST']) def get_users(): if", "id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(120), unique=True) def", "def requires_auth(f): @wraps(f) def decorated(*args, **kwargs): auth = request.authorization if not auth or", "primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(120), unique=True) def __init__(self, username, email):", "flask import Flask, jsonify, abort, request, Response from flask.ext.sqlalchemy import SQLAlchemy app =", "app.secret_key = '<KEY>' db = SQLAlchemy(app) # region dbClasses class User(db.Model): \"\"\" Represent", "hello_world(): return 'Hello World!' 
@app.route('/users', methods=['GET', 'POST']) def get_users(): if request.method == 'POST':", "'username': self.username, 'email': self.email } # endregion # region authorization def check_auth(username, password):", "@wraps(f) def decorated(*args, **kwargs): auth = request.authorization if not auth or not check_auth(auth.username,", "return '<User {}>'.format(self.username) def to_dict(self): return { 'id': self.id, 'username': self.username, 'email': self.email", "self.email } # endregion # region authorization def check_auth(username, password): return username ==", "= db.Column(db.String(120), unique=True) def __init__(self, username, email): self.username = username self.email = email", "abort() user = User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}), 201 elif request.method", "proper credentials', 401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'}) def requires_auth(f): @wraps(f) def decorated(*args, **kwargs):", "app = Flask(__name__) app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key = '<KEY>' db", "decorated(*args, **kwargs): auth = request.authorization if not auth or not check_auth(auth.username, auth.password): return", "db = SQLAlchemy(app) # region dbClasses class User(db.Model): \"\"\" Represent a user in", "return Response( 'Could not verify your access level for that URL.\\n' 'You have", "def to_dict(self): return { 'id': self.id, 'username': self.username, 'email': self.email } # endregion", "= db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(120), unique=True) def __init__(self,", "'Hello World!' 
@app.route('/users', methods=['GET', 'POST']) def get_users(): if request.method == 'POST': if (request.json['username']", "request.json['email'] is None): abort() user = User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}),", "region dbClasses class User(db.Model): \"\"\" Represent a user in database \"\"\" id =", "== 'GET': users = User.query.all() users_dto = [user.to_dict() for user in users] return", "# region dbClasses class User(db.Model): \"\"\" Represent a user in database \"\"\" id", "or not check_auth(auth.username, auth.password): return authenticate() return f(*args, **kwargs) return decorated # endregion", "jsonify, abort, request, Response from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) app.debug =", "Flask, jsonify, abort, request, Response from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) app.debug", "User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}), 201 elif request.method == 'GET': users", "lib from functools import wraps # third party packages from flask import Flask,", "from functools import wraps # third party packages from flask import Flask, jsonify,", "__init__(self, username, email): self.username = username self.email = email def __repr__(self): return '<User", "None or request.json['email'] is None): abort() user = User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return", "standard lib from functools import wraps # third party packages from flask import", "def __repr__(self): return '<User {}>'.format(self.username) def to_dict(self): return { 'id': self.id, 'username': self.username,", "def hello_world(): return 'Hello World!' 
@app.route('/users', methods=['GET', 'POST']) def get_users(): if request.method ==", "get_users(): if request.method == 'POST': if (request.json['username'] is None or request.json['email'] is None):", "# endregion # region authorization def check_auth(username, password): return username == 'admin' and", "request.method == 'POST': if (request.json['username'] is None or request.json['email'] is None): abort() user", "for user in users] return jsonify({'users': users_dto}), 200 else: abort(405, \"Method not supported\")", "return f(*args, **kwargs) return decorated # endregion @app.route('/') @requires_auth def hello_world(): return 'Hello", "'email': self.email } # endregion # region authorization def check_auth(username, password): return username", "return jsonify({'users': users_dto}), 200 else: abort(405, \"Method not supported\") @app.errorhandler(405) def custom405(error): response", "with proper credentials', 401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'}) def requires_auth(f): @wraps(f) def decorated(*args,", "db.Column(db.String(120), unique=True) def __init__(self, username, email): self.username = username self.email = email def", "credentials', 401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'}) def requires_auth(f): @wraps(f) def decorated(*args, **kwargs): auth", "level for that URL.\\n' 'You have to login with proper credentials', 401, {'WWW-Authenticate':", "request.method == 'GET': users = User.query.all() users_dto = [user.to_dict() for user in users]", "not auth or not check_auth(auth.username, auth.password): return authenticate() return f(*args, **kwargs) return decorated", "def get_users(): if request.method == 'POST': if (request.json['username'] is None or request.json['email'] is", "endregion # region authorization def check_auth(username, password): return username == 'admin' and password", "and password == '<PASSWORD>' def authenticate(): return Response( 'Could not verify your access", "return { 'id': self.id, 
'username': self.username, 'email': self.email } # endregion # region", "return username == 'admin' and password == '<PASSWORD>' def authenticate(): return Response( 'Could", "username self.email = email def __repr__(self): return '<User {}>'.format(self.username) def to_dict(self): return {", "self.email = email def __repr__(self): return '<User {}>'.format(self.username) def to_dict(self): return { 'id':", "password == '<PASSWORD>' def authenticate(): return Response( 'Could not verify your access level", "import Flask, jsonify, abort, request, Response from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__)", "URL.\\n' 'You have to login with proper credentials', 401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'}) def requires_auth(f): @wraps(f) def decorated(*args, **kwargs): auth =", "request, Response from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) app.debug = True app.config['SQLALCHEMY_DATABASE_URI']", "is None or request.json['email'] is None): abort() user = User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit()", "party packages from flask import Flask, jsonify, abort, request, Response from flask.ext.sqlalchemy import", "'GET': users = User.query.all() users_dto = [user.to_dict() for user in users] return jsonify({'users':", "abort(405, \"Method not supported\") @app.errorhandler(405) def custom405(error): response = jsonify({'message': error.description}) return response,", "custom405(error): response = jsonify({'message': error.description}) return response, 405 if __name__ == '__main__': if", "self.id, 'username': self.username, 'email': self.email } # endregion # region authorization def check_auth(username,", "check_auth(auth.username, auth.password): return authenticate() return f(*args, **kwargs) return decorated # endregion @app.route('/') @requires_auth", "def custom405(error): response = 
jsonify({'message': error.description}) return response, 405 if __name__ == '__main__':", "} # endregion # region authorization def check_auth(username, password): return username == 'admin'", "(request.json['username'] is None or request.json['email'] is None): abort() user = User(request.json['username'], request.json['email']) db.session.add(user)", "@app.route('/users', methods=['GET', 'POST']) def get_users(): if request.method == 'POST': if (request.json['username'] is None", "import SQLAlchemy app = Flask(__name__) app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key =", "World!' @app.route('/users', methods=['GET', 'POST']) def get_users(): if request.method == 'POST': if (request.json['username'] is", "= SQLAlchemy(app) # region dbClasses class User(db.Model): \"\"\" Represent a user in database", "dbClasses class User(db.Model): \"\"\" Represent a user in database \"\"\" id = db.Column(db.Integer,", "== 'admin' and password == '<PASSWORD>' def authenticate(): return Response( 'Could not verify", "access level for that URL.\\n' 'You have to login with proper credentials', 401,", "else: abort(405, \"Method not supported\") @app.errorhandler(405) def custom405(error): response = jsonify({'message': error.description}) return", "to login with proper credentials', 401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'}) def requires_auth(f): @wraps(f)", "jsonify({'users': users_dto}), 200 else: abort(405, \"Method not supported\") @app.errorhandler(405) def custom405(error): response =", "from flask import Flask, jsonify, abort, request, Response from flask.ext.sqlalchemy import SQLAlchemy app", "@app.errorhandler(405) def custom405(error): response = jsonify({'message': error.description}) return response, 405 if __name__ ==", "return jsonify({'user': user.to_dict()}), 201 elif request.method == 'GET': users = User.query.all() users_dto =", "Response from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) 
app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] =", "Flask(__name__) app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key = '<KEY>' db = SQLAlchemy(app)", "if (request.json['username'] is None or request.json['email'] is None): abort() user = User(request.json['username'], request.json['email'])", "app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key = '<KEY>' db = SQLAlchemy(app) #", "= email def __repr__(self): return '<User {}>'.format(self.username) def to_dict(self): return { 'id': self.id,", "None): abort() user = User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}), 201 elif", "User(db.Model): \"\"\" Represent a user in database \"\"\" id = db.Column(db.Integer, primary_key=True) username", "requires_auth(f): @wraps(f) def decorated(*args, **kwargs): auth = request.authorization if not auth or not", "db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email = db.Column(db.String(120), unique=True) def __init__(self, username,", "User.query.all() users_dto = [user.to_dict() for user in users] return jsonify({'users': users_dto}), 200 else:", "to_dict(self): return { 'id': self.id, 'username': self.username, 'email': self.email } # endregion #", "authorization def check_auth(username, password): return username == 'admin' and password == '<PASSWORD>' def", "{}>'.format(self.username) def to_dict(self): return { 'id': self.id, 'username': self.username, 'email': self.email } #", "in database \"\"\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True) email =", "def check_auth(username, password): return username == 'admin' and password == '<PASSWORD>' def authenticate():", "from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'", "# 
standard lib from functools import wraps # third party packages from flask", "app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key = '<KEY>' db = SQLAlchemy(app) # region dbClasses class", "or request.json['email'] is None): abort() user = User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user':", "[user.to_dict() for user in users] return jsonify({'users': users_dto}), 200 else: abort(405, \"Method not", "'Basic realm=\"Login Required\"'}) def requires_auth(f): @wraps(f) def decorated(*args, **kwargs): auth = request.authorization if", "users = User.query.all() users_dto = [user.to_dict() for user in users] return jsonify({'users': users_dto}),", "verify your access level for that URL.\\n' 'You have to login with proper", "self.username, 'email': self.email } # endregion # region authorization def check_auth(username, password): return", "# third party packages from flask import Flask, jsonify, abort, request, Response from", "'<KEY>' db = SQLAlchemy(app) # region dbClasses class User(db.Model): \"\"\" Represent a user", "a user in database \"\"\" id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(80), unique=True)", "SQLAlchemy app = Flask(__name__) app.debug = True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key = '<KEY>'", "endregion @app.route('/') @requires_auth def hello_world(): return 'Hello World!' 
@app.route('/users', methods=['GET', 'POST']) def get_users():", "not check_auth(auth.username, auth.password): return authenticate() return f(*args, **kwargs) return decorated # endregion @app.route('/')", "= username self.email = email def __repr__(self): return '<User {}>'.format(self.username) def to_dict(self): return", "jsonify({'user': user.to_dict()}), 201 elif request.method == 'GET': users = User.query.all() users_dto = [user.to_dict()", "'sqlite:///test.db' app.secret_key = '<KEY>' db = SQLAlchemy(app) # region dbClasses class User(db.Model): \"\"\"", "def __init__(self, username, email): self.username = username self.email = email def __repr__(self): return", "supported\") @app.errorhandler(405) def custom405(error): response = jsonify({'message': error.description}) return response, 405 if __name__", "200 else: abort(405, \"Method not supported\") @app.errorhandler(405) def custom405(error): response = jsonify({'message': error.description})", "region authorization def check_auth(username, password): return username == 'admin' and password == '<PASSWORD>'", "True app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' app.secret_key = '<KEY>' db = SQLAlchemy(app) # region dbClasses", "password): return username == 'admin' and password == '<PASSWORD>' def authenticate(): return Response(", "users] return jsonify({'users': users_dto}), 200 else: abort(405, \"Method not supported\") @app.errorhandler(405) def custom405(error):", "if request.method == 'POST': if (request.json['username'] is None or request.json['email'] is None): abort()", "users_dto = [user.to_dict() for user in users] return jsonify({'users': users_dto}), 200 else: abort(405,", "in users] return jsonify({'users': users_dto}), 200 else: abort(405, \"Method not supported\") @app.errorhandler(405) def", "self.username = username self.email = email def __repr__(self): return '<User {}>'.format(self.username) def to_dict(self):", "login with proper credentials', 401, {'WWW-Authenticate': 
'Basic realm=\"Login Required\"'}) def requires_auth(f): @wraps(f) def", "= User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}), 201 elif request.method == 'GET':", "users_dto}), 200 else: abort(405, \"Method not supported\") @app.errorhandler(405) def custom405(error): response = jsonify({'message':", "functools import wraps # third party packages from flask import Flask, jsonify, abort,", "'admin' and password == '<PASSWORD>' def authenticate(): return Response( 'Could not verify your", "def decorated(*args, **kwargs): auth = request.authorization if not auth or not check_auth(auth.username, auth.password):", "user = User(request.json['username'], request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}), 201 elif request.method ==", "__repr__(self): return '<User {}>'.format(self.username) def to_dict(self): return { 'id': self.id, 'username': self.username, 'email':", "db.session.commit() return jsonify({'user': user.to_dict()}), 201 elif request.method == 'GET': users = User.query.all() users_dto", "== 'POST': if (request.json['username'] is None or request.json['email'] is None): abort() user =", "auth.password): return authenticate() return f(*args, **kwargs) return decorated # endregion @app.route('/') @requires_auth def", "not supported\") @app.errorhandler(405) def custom405(error): response = jsonify({'message': error.description}) return response, 405 if", "request.json['email']) db.session.add(user) db.session.commit() return jsonify({'user': user.to_dict()}), 201 elif request.method == 'GET': users =", "unique=True) email = db.Column(db.String(120), unique=True) def __init__(self, username, email): self.username = username self.email", "db.Column(db.String(80), unique=True) email = db.Column(db.String(120), unique=True) def __init__(self, username, email): self.username = username", "return decorated # endregion 
@app.route('/') @requires_auth def hello_world(): return 'Hello World!' @app.route('/users', methods=['GET',", "error.description}) return response, 405 if __name__ == '__main__': if app.debug: app.run() else: app.run(host='0.0.0.0')", "'id': self.id, 'username': self.username, 'email': self.email } # endregion # region authorization def", "check_auth(username, password): return username == 'admin' and password == '<PASSWORD>' def authenticate(): return", "'<PASSWORD>' def authenticate(): return Response( 'Could not verify your access level for that", "= User.query.all() users_dto = [user.to_dict() for user in users] return jsonify({'users': users_dto}), 200", "have to login with proper credentials', 401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'}) def requires_auth(f):", "request.authorization if not auth or not check_auth(auth.username, auth.password): return authenticate() return f(*args, **kwargs)", "authenticate() return f(*args, **kwargs) return decorated # endregion @app.route('/') @requires_auth def hello_world(): return", "= db.Column(db.String(80), unique=True) email = db.Column(db.String(120), unique=True) def __init__(self, username, email): self.username =", "packages from flask import Flask, jsonify, abort, request, Response from flask.ext.sqlalchemy import SQLAlchemy" ]
[ "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,", "(list, tuple)): for p in path_to_files: if not path.isfile(p): return False elif isinstance(path_to_files,", "list_images ] # [()] is used instead of .value elif isinstance(list_images, int): data", "the given image into the given size :param img: as numpy array :param", "\"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: result_list = list(range(1)) except Exception as", "img = img.astype(np.float64, copy=False) mean = np.mean(img) std = np.std(img) img = (img", "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "images number 5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w, center=None, radius=None): if center is", "iterate = True list_new_paths += [path.join(p, f) for f in listdir(p)] elif path.isfile(p):", "It will be converted in list of integer :param path: :return: \"\"\" print(\"Try", "except Exception as e: print(e) print( \"WARNING in get_list_images: the file '\" +", "list of valid hdf \"\"\" return [ path_to_file for path_to_file in path_to_files if", "an HDF file with the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to get the", "getImages_fromList_key(file_index_tubles): \"\"\" Returns the images in the hdf file (path_to_file) listed in (list_images)", ".value elif isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in getImages_fromList_key: invalid", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED", "of numpy arrays \"\"\" data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\":", "base of its mean and variance :param img: :return: \"\"\" import numpy as", "(and subfolder)given from the user :return: list of valid hdf \"\"\" return [", "elif isinstance(path_to_files, str): return 
path.isfile(path_to_files) return True def calc_2d_spectra(img): from scipy import fftpack", "[ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images ] # [()] is used instead of", "path) import os filename_ext = os.path.basename(path).split(\".\")[-1] result_list = None try: if filename_ext in", "t == 1: return np.flipud(img) elif t == 2: return np.fliplr(img) elif t", "min(center[0], center[1], w - center[0], h - center[1]) Y, X = np.ogrid[:h, :w]", "keys representing the images in the hdf/mrcs/st file. It will be converted in", "HDF file with the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to get the following", "mask def checkfiles(path_to_files): \"\"\" checks if the hdf files are in the correct", "this software and associated documentation files (the \"Software\"), to deal in the Software", "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "is True: return getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check if the given", "exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]: data = [] with mrcfile.mmap(path_to_file, permissive=True,", "of strings:\", type(list_images), ) print(\"you try to get the following images\") print(list_images) exit()", "= list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, driver=\"core\") as", "** 2) mask = dist_from_center <= radius return mask def checkfiles(path_to_files): \"\"\" checks", "h5py from PIL import Image # install it via pip install pillow import", "of valid hdf :param path_to_files: list of all the files present in the", "elif path.isfile(p): list_new_paths.append(p) else: print( \"WARNING: The given path '\" + str(p) +", ":param img: :param t: type of the flip 1 --> flip over the", "np F1 = fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D = np.abs(F2) ** 2 
return", "USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from os import path, listdir", "print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should be a string or a", "list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in get_list_images: the file '\" + path + \"", "and Y) otherwise --> no flip :return: \"\"\" if t == 1: return", "OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "not a folder or a file and it will be ignored\" ) if", "\"hdf\": try: with h5py.File(path, \"r\") as f: list_candidate = [int(v) for v in", "use the middle of the image center = (int(w / 2), int(h /", "\"\"\" normalize the images in base of its mean and variance :param img:", "the Software without restriction, including without limitation the rights to use, copy, modify,", "/ (std+0.00001) # img = img.astype(np.float32, copy=False) return img def flip_img(img, t=None): \"\"\"", "person obtaining a copy of this software and associated documentation files (the \"Software\"),", "Automatic 2D class selection tool. MIT License Copyright (c) 2019 <NAME> Institute of", "the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "f: if isinstance(list_images, list) or isinstance(list_images, tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i", "int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should", "img.astype(np.float32, copy=False) return img def flip_img(img, t=None): \"\"\" It flip the image in", "direction.(X) 2 --> flip over the column Flipped array in right-left direction(Y) 3", "row. 
Flipped array in up-down direction.(X) 2 --> flip over the column Flipped", "path.isfile(path_to_files) return True def calc_2d_spectra(img): from scipy import fftpack import numpy as np", "or isinstance(list_images, tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images ] #", "False for p in paths: if path.isdir(p): iterate = True list_new_paths += [path.join(p,", "without restriction, including without limitation the rights to use, copy, modify, merge, publish,", "merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit", "Institute of Molecular Physiology Permission is hereby granted, free of charge, to any", "It will be ignored\" ) if filename_ext == \"hdf\": try: with h5py.File(path, \"r\")", "resize size :return: return the resized img \"\"\" im = Image.fromarray(img) return np.array(im.resize(resize,", "mrc: if isinstance(list_images, int): list_images = [list_images] if isinstance(list_images, list) or isinstance(list_images, tuple):", ":return: \"\"\" if t == 1: return np.flipud(img) elif t == 2: return", "return True def calc_2d_spectra(img): from scipy import fftpack import numpy as np F1", "3 --> flip over the column and the row (X and Y) otherwise", "with the following format:\\n\\t['MDF']['images']. 
It will be ignored\" ) if len(list_candidate) > 0:", "mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: result_list = list(range(1)) except Exception as e: print(e)", "\"\"\" Returns the images in the hdf file (path_to_file) listed in (list_images) :param", "np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize the images in base of its mean", "list(range(1)) except Exception as e: print(e) print( \"WARNING in get_list_images: the file '\"", "[\"mrc\", \"mrcs\", \"st\"]: data = [] with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as mrc: if", "if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1: if len(list_images)==1: data", "[list_images] if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1: data =", "hdf :param path_to_files: list of all the files present in the folder (and", "tuple ): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images ] # [()]", "from PIL import Image # install it via pip install pillow import numpy", "\"\\nERROR in getImages_fromList_key: invalid list_images, it should be a string or a list/tuple", "in list_images] elif len(list_images) == 1: data = np.nan_to_num(mrc.data) result_data.append(data) return result_data def", "or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO READ THE HDF\"\"\"", ":param list_images: list of keys of the DB. 
It is the output( or", "= [list_images] if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1: data", "== 1: data = [mrc.data] return data \"\"\" FUNCTION TO MANIPULATE THE IMAGES\"\"\"", "return the resized img \"\"\" im = Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img):", "= list_candidate if filename_ext == \"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: result_list", "as np F1 = fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D = np.abs(F2) ** 2", "result_list = None try: if filename_ext in {\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\")", "isinstance(list_images, int): list_images = [list_images] if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz", "file is the following: ['MDF']['images']['i']['image'] where i is a number representing the i-th", "OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from os import path, listdir import", ":param path_to_file: path to hdf file :param list_images: list of keys of the", "as mrc: if isinstance(list_images, int): list_images = [list_images] if isinstance(list_images, list) or isinstance(list_images,", "over the column and the row (X and Y) otherwise --> no flip", "THE SOFTWARE. \"\"\" from os import path, listdir import h5py from PIL import", "path_to_file in path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\"", "of Molecular Physiology Permission is hereby granted, free of charge, to any person", "files in the given paths. It is called recursively :param paths: path or", "in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED", ":return: Returns a list of numpy arrays \"\"\" # driver=\"core\" result_data = list()", "path to hdf file :param list_images: list of keys of the DB. 
It", "'r') as f: if isinstance(list_images, list) or isinstance( list_images, tuple ): data =", "SOFTWARE. \"\"\" from os import path, listdir import h5py from PIL import Image", "data = [] with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as mrc: if isinstance(list_images, int): list_images", "data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should be", "IMAGES\"\"\" def apply_mask(img, mask): mean = np.mean(img) img[mask==False]=mean return img def resize_img(img, resize=(76,", "file. It will be converted in list of integer :param path: :return: \"\"\"", "path or list of paths :return: \"\"\" if isinstance(paths, str): paths = [paths]", "sublicense, and/or sell copies of the Software, and to permit persons to whom", "this permission notice shall be included in all copies or substantial portions of", "= min(center[0], center[1], w - center[0], h - center[1]) Y, X = np.ogrid[:h,", "from the user :return: list of valid hdf \"\"\" return [ path_to_file for", "modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to", "isinstance( list_images, tuple ): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images ]", "return result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns the images in the hdf file (path_to_file)", "else: data = [np.nan_to_num(mrc.data[i]) for i in list_images] elif len(list_images) == 1: data", "ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "os import path, listdir import h5py from PIL import Image # install it", "of .value elif isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in getImages_fromList_key:", "return False elif isinstance(path_to_files, str): return path.isfile(path_to_files) return True def calc_2d_spectra(img): from scipy", "if len(list_candidate) > 
0: result_list = list_candidate if filename_ext == \"mrc\": with mrcfile.mmap(path,", "given from 'get_list_images' :return: Returns a list of numpy arrays \"\"\" # driver=\"core\"", "of numpy arrays \"\"\" # driver=\"core\" result_data = list() for path_to_file, list_images in", "THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from os import path,", "WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "resize: resize size :return: return the resized img \"\"\" im = Image.fromarray(img) return", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF", "present in the folder (and subfolder)given from the user :return: list of valid", "list of all the files present in the folder (and subfolder)given from the", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A", "[paths] list_new_paths = list() iterate = False for p in paths: if path.isdir(p):", "h5py.File(path, \"r\") as f: list_candidate = [int(v) for v in list(f[\"MDF\"][\"images\"])] except: print(", "MANIPULATE THE IMAGES\"\"\" def apply_mask(img, mask): mean = np.mean(img) img[mask==False]=mean return img def", "flip over the row. 
Flipped array in up-down direction.(X) 2 --> flip over", "list_candidate = [int(v) for v in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in get_list_images: the", "over the column Flipped array in right-left direction(Y) 3 --> flip over the", "is called recursively :param paths: path or list of paths :return: \"\"\" if", "scipy import fftpack import numpy as np F1 = fftpack.fft2(img) F2 = fftpack.fftshift(F1)", "True: return getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check if the given files", "notice and this permission notice shall be included in all copies or substantial", "center = (int(w / 2), int(h / 2)) if radius is None: #", "center and image walls radius = min(center[0], center[1], w - center[0], h -", "integer :param path: :return: \"\"\" print(\"Try to list images on\", path) import os", "IN THE SOFTWARE. \"\"\" from os import path, listdir import h5py from PIL", "filename_ext == \"hdf\": try: with h5py.File(path, \"r\") as f: list_candidate = [int(v) for", "if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, 'r') as f: if isinstance(list_images, list)", "Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize the images in base of", "HDF file with the following format:\\n\\t['MDF']['images']. 
It will be ignored\" ) if len(list_candidate)", "(img - mean) / (std+0.00001) # img = img.astype(np.float32, copy=False) return img def", "or part of its) given from 'get_list_images' :return: Returns a list of numpy", "result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns the images in the hdf file (path_to_file) listed", "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE", "is None: # use the smallest distance between the center and image walls", "\"\\nERROR in getImages_fromList_key: the file '\" + path_to_file + \" is not an", "as mrc: result_list = list(range(1)) except Exception as e: print(e) print( \"WARNING in", "into the given size :param img: as numpy array :param resize: resize size", "str): paths = [paths] list_new_paths = list() iterate = False for p in", "TO MANIPULATE THE IMAGES\"\"\" def apply_mask(img, mask): mean = np.mean(img) img[mask==False]=mean return img", "is used instead of .value elif isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print(", "the given typ :param img: :param t: type of the flip 1 -->", "path + \" is not an HDF file with the following format:\\n\\t['MDF']['images']. 
It", "charge, to any person obtaining a copy of this software and associated documentation", "if not path.isfile(p): return False elif isinstance(path_to_files, str): return path.isfile(path_to_files) return True def", "the column and the row (X and Y) otherwise --> no flip :return:", "<NAME> Institute of Molecular Physiology Permission is hereby granted, free of charge, to", "to get the following images\") print(list_images) print(\"there are \" + str(len(f[\"MDF\"][\"images\"]))) exit() elif", "KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "list images on\", path) import os filename_ext = os.path.basename(path).split(\".\")[-1] result_list = None try:", "permissive=True, mode=\"r\") as mrc: if isinstance(list_images, int): list_images = [list_images] if isinstance(list_images, list)", "image center = (int(w / 2), int(h / 2)) if radius is None:", "p in path_to_files: if not path.isfile(p): return False elif isinstance(path_to_files, str): return path.isfile(path_to_files)", "FUNCTION TO READ THE HDF\"\"\" def get_key_list_images(path): \"\"\" Returns the list of the", "images on\", path) import os filename_ext = os.path.basename(path).split(\".\")[-1] result_list = None try: if", "its) given from 'get_list_images' :return: Returns a list of numpy arrays \"\"\" #", "get the images number 5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w, center=None, radius=None): if", "get the following images\") print(list_images) exit() except Exception as e: print(e) print( \"\\nERROR", "76)): \"\"\" Resize the given image into the given size :param img: as", "the file '\" + path + \" is not an HDF file with", "img def flip_img(img, t=None): \"\"\" It flip the image in function of the", "get_list_images: the file '\" + path + \" is not an HDF file", "return mask def checkfiles(path_to_files): \"\"\" checks if the hdf files are in the", "[np.nan_to_num(mrc.data[i]) for i in list_images] elif len(list_images) 
== 1: data = np.nan_to_num(mrc.data) result_data.append(data)", ":return: \"\"\" if isinstance(paths, str): paths = [paths] list_new_paths = list() iterate =", "in get_list_images: the file '\" + path + \" is not an HDF", "= list_candidate return result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns the images in the hdf", "= np.std(img) img = (img - mean) / (std+0.00001) # img = img.astype(np.float32,", "import mrcfile \"\"\" The format of the .hf file is the following: ['MDF']['images']['i']['image']", "numpy array :param resize: resize size :return: return the resized img \"\"\" im", "np.ogrid[:h, :w] dist_from_center = np.sqrt((X - center[0]) ** 2 + (Y - center[1])", "= [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images ] # [()] is used instead", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION", "of valid hdf \"\"\" return [ path_to_file for path_to_file in path_to_files if path_to_file.endswith(\"mrcs\")", "or isinstance(list_images, tuple): if mrc.header.nz > 1: if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else:", "False elif isinstance(path_to_files, str): return path.isfile(path_to_files) return True def calc_2d_spectra(img): from scipy import", "permissive=True, mode=\"r\") as mrc: list_candidate = [i for i in range(mrc.header.nz)] if len(list_candidate)", "persons to whom the Software is furnished to do so, subject to the", "[()] is used instead of .value elif isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else:", "mrc.header.nz > 1: data = [mrc.data[i] for i in list_images] elif len(list_images) ==", "Resize the given image into the given size :param img: as numpy array", "should be a string or a list/tuple of strings:\", type(list_images), ) print(\"you try", "return data \"\"\" FUNCTION TO MANIPULATE THE IMAGES\"\"\" def apply_mask(img, mask): mean =", "Software is furnished to do so, subject to the 
following conditions: The above", "= fftpack.fftshift(F1) psd2D = np.abs(F2) ** 2 return psd2D def getList_files(paths): \"\"\" Returns", "isinstance(list_images, tuple): if mrc.header.nz > 1: if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else: data", "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "given from 'get_list_images' :return: Returns a list of numpy arrays \"\"\" data =", "= np.mean(img) std = np.std(img) img = (img - mean) / (std+0.00001) #", "2D class selection tool. MIT License Copyright (c) 2019 <NAME> Institute of Molecular", "mrc.header.nz > 1: if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i]) for", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND", "['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w, center=None, radius=None): if center is None: # use", "list_candidate return result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns the images in the hdf file", "return np.flipud(img) elif t == 2: return np.fliplr(img) elif t == 3: return", "\" is not an HDF file with the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try", "list_images] elif len(list_images) == 1: data = [mrc.data] return data \"\"\" FUNCTION TO", "): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images ] # [()] is", "if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, driver=\"core\") as f: if", "distance between the center and image walls radius = min(center[0], center[1], w -", "data = [mrc.data] return data \"\"\" FUNCTION TO MANIPULATE THE IMAGES\"\"\" def apply_mask(img,", ":param img: as numpy array :param resize: resize size :return: return the resized", "np.abs(F2) ** 2 return psd2D def getList_files(paths): \"\"\" Returns the list of the", "be ignored\" ) 
if filename_ext == \"hdf\": try: with h5py.File(path, \"r\") as f:", "the flip 1 --> flip over the row. Flipped array in up-down direction.(X)", "return [ path_to_file for path_to_file in path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\")", "file with the following format:\\n\\t['MDF']['images']. It will be ignored\" ) if len(list_candidate) >", "--> flip over the column Flipped array in right-left direction(Y) 3 --> flip", "F2 = fftpack.fftshift(F1) psd2D = np.abs(F2) ** 2 return psd2D def getList_files(paths): \"\"\"", "list) or isinstance(list_images, tuple): if mrc.header.nz > 1: data = [mrc.data[i] for i", "paths :return: \"\"\" if isinstance(path_to_files, (list, tuple)): for p in path_to_files: if not", "path.isfile(p): return False elif isinstance(path_to_files, str): return path.isfile(path_to_files) return True def calc_2d_spectra(img): from", "to deal in the Software without restriction, including without limitation the rights to", "Check if the given files are hdf/mrcs/st with a valid format. Return The", "for v in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in get_list_images: the file '\" +", "not an HDF file with the following format:\\n\\t['MDF']['images']. It will be ignored\" )", "flip 1 --> flip over the row. Flipped array in up-down direction.(X) 2", "in the folder (and subfolder)given from the user :return: list of valid hdf", "- mean) / (std+0.00001) # img = img.astype(np.float32, copy=False) return img def flip_img(img,", "e: print(e) print( \"\\nERROR in getImages_fromList_key: the file '\" + path_to_file + \"", "be ignored\" ) if len(list_candidate) > 0: result_list = list_candidate return result_list def", "elif len(list_images) == 1: data = np.nan_to_num(mrc.data) result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file, list_images):", "the list of the valid hdf files in the given paths. 
It is", "number representing the i-th images hence to get the images number 5: ['MDF']['images']['5']['image'][()]", "[mrc.data[i] for i in list_images] elif len(list_images) == 1: data = [mrc.data] return", "True def calc_2d_spectra(img): from scipy import fftpack import numpy as np F1 =", "np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images ] # [()] is used instead of .value", "path.isdir(p): iterate = True list_new_paths += [path.join(p, f) for f in listdir(p)] elif", "to whom the Software is furnished to do so, subject to the following", "'get_list_images' :return: Returns a list of numpy arrays \"\"\" data = list() if", "invalid list_images, it should be a string or a list/tuple of strings:\", type(list_images),", ".value elif isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in getImages_fromList_key: invalid", "documentation files (the \"Software\"), to deal in the Software without restriction, including without", "[i for i in range(mrc.header.nz)] if len(list_candidate) > 0: result_list = list_candidate if", "ignored\" ) if filename_ext == \"hdf\": try: with h5py.File(path, \"r\") as f: list_candidate", "\"\"\" if isinstance(path_to_files, (list, tuple)): for p in path_to_files: if not path.isfile(p): return", "files (the \"Software\"), to deal in the Software without restriction, including without limitation", "the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to get the following images\") print(list_images) print(\"there", "given paths. It is called recursively :param paths: path or list of paths", "Software without restriction, including without limitation the rights to use, copy, modify, merge,", "paths. 
It is called recursively :param paths: path or list of paths :return:", "the user :return: list of valid hdf \"\"\" return [ path_to_file for path_to_file", "if isinstance(list_images, list) or isinstance( list_images, tuple ): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for", "hdf files in the given paths. It is called recursively :param paths: path", "if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, 'r') as f: if", "to do so, subject to the following conditions: The above copyright notice and", "+ path + \" is not an valid mrc file. It will be", "is not an HDF file with the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to", "array in up-down direction.(X) 2 --> flip over the column Flipped array in", "between the center and image walls radius = min(center[0], center[1], w - center[0],", "print( \"\\nERROR in getImages_fromList_key: the file '\" + path_to_file + \" is not", "as mrc: list_candidate = [i for i in range(mrc.header.nz)] if len(list_candidate) > 0:", "mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: list_candidate = [i for i in range(mrc.header.nz)] if", "in the Software without restriction, including without limitation the rights to use, copy,", "import fftpack import numpy as np F1 = fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D", "= [i for i in range(mrc.header.nz)] if len(list_candidate) > 0: result_list = list_candidate", "Exception as e: print(e) print( \"WARNING in get_list_images: the file '\" + path", "images\") print(list_images) exit() except Exception as e: print(e) print( \"\\nERROR in getImages_fromList_key: the", ":param path_to_files: list of paths :return: \"\"\" if isinstance(path_to_files, (list, tuple)): for p", ":param t: type of the flip 1 --> flip over the row. 
Flipped", "output( or part of its) given from 'get_list_images' :return: Returns a list of", "isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1: if len(list_images)==1: data =", "are hdf/mrcs/st with a valid format. Return The list of valid hdf :param", "[] with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as mrc: if isinstance(list_images, int): list_images = [list_images]", "the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "path_to_file + \" is not an HDF file with the following format:\\n\\t['MDF']['images']['0']['image']\" )", "\"\"\" print(\"Try to list images on\", path) import os filename_ext = os.path.basename(path).split(\".\")[-1] result_list", "to any person obtaining a copy of this software and associated documentation files", "isinstance(list_images, list) or isinstance(list_images, tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN", "def getList_files(paths): \"\"\" Returns the list of the valid hdf files in the", "def normalize_img(img): \"\"\" normalize the images in base of its mean and variance", "(std+0.00001) # img = img.astype(np.float32, copy=False) return img def flip_img(img, t=None): \"\"\" It", "Returns the images in the hdf file (path_to_file) listed in (list_images) :param path_to_file:", "data = [mrc.data[i] for i in list_images] elif len(list_images) == 1: data =", "paths: if path.isdir(p): iterate = True list_new_paths += [path.join(p, f) for f in", "# install it via pip install pillow import numpy as np import mrcfile", "checks if the hdf files are in the correct path and returns True", "print(e) print( \"WARNING in get_list_images: the file '\" + path + \" is", "with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as mrc: if isinstance(list_images, int): list_images = [list_images] if", ":return: list of valid hdf \"\"\" return [ path_to_file 
for path_to_file in path_to_files", "/ 2)) if radius is None: # use the smallest distance between the", "hdf file (path_to_file) listed in (list_images) :param path_to_file: path to hdf file :param", "img: :return: \"\"\" import numpy as np # img = img.astype(np.float64, copy=False) mean", "and returns True if all of them exists :param path_to_files: list of paths", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from", "data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should be", "i is a number representing the i-th images hence to get the images", "1: data = np.nan_to_num(mrc.data) result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the", "a copy of this software and associated documentation files (the \"Software\"), to deal", "Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "i in list_images ] # [()] is used instead of .value elif isinstance(list_images,", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "where i is a number representing the i-th images hence to get the", "return path.isfile(path_to_files) return True def calc_2d_spectra(img): from scipy import fftpack import numpy as", "number 5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w, center=None, radius=None): if center is None:", "it should be a string or a list/tuple of strings:\", type(list_images), ) print(\"you", "with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: list_candidate = [i for i in range(mrc.header.nz)]", "list() iterate = False for p in paths: if path.isdir(p): iterate = True", "mask): mean = np.mean(img) img[mask==False]=mean return img def resize_img(img, resize=(76, 76)): \"\"\" Resize", "image in function of the given typ :param img: :param t: type of", "WHETHER IN AN ACTION OF CONTRACT, TORT OR 
OTHERWISE, ARISING FROM, OUT OF", "Returns the list of the valid hdf files in the given paths. It", "i in range(mrc.header.nz)] if len(list_candidate) > 0: result_list = list_candidate if filename_ext ==", "a file and it will be ignored\" ) if iterate is True: return", "DEALINGS IN THE SOFTWARE. \"\"\" from os import path, listdir import h5py from", "np.mean(img) img[mask==False]=mean return img def resize_img(img, resize=(76, 76)): \"\"\" Resize the given image", "correct path and returns True if all of them exists :param path_to_files: list", "\"\"\" Automatic 2D class selection tool. MIT License Copyright (c) 2019 <NAME> Institute", "flip over the column Flipped array in right-left direction(Y) 3 --> flip over", ") if filename_ext == \"hdf\": try: with h5py.File(path, \"r\") as f: list_candidate =", "if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, driver=\"core\") as f: if isinstance(list_images, list)", "try: with h5py.File(path_to_file, driver=\"core\") as f: if isinstance(list_images, list) or isinstance(list_images, tuple): data", "numpy arrays \"\"\" # driver=\"core\" result_data = list() for path_to_file, list_images in file_index_tubles:", "\" + str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]: data = []", "in up-down direction.(X) 2 --> flip over the column Flipped array in right-left", "column Flipped array in right-left direction(Y) 3 --> flip over the column and", "filename_ext = os.path.basename(path).split(\".\")[-1] result_list = None try: if filename_ext in {\"mrcs\", \"st\"}: with", "the smallest distance between the center and image walls radius = min(center[0], center[1],", "data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images ] # [()] is used", "flip over the column and the row (X and Y) otherwise --> no", "free of charge, to any person obtaining a copy of this software and", "and this 
permission notice shall be included in all copies or substantial portions", "tool. MIT License Copyright (c) 2019 <NAME> Institute of Molecular Physiology Permission is", "hdf/mrcs/st file. It will be converted in list of integer :param path: :return:", "and to permit persons to whom the Software is furnished to do so,", "[ path_to_file for path_to_file in path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or", "# use the middle of the image center = (int(w / 2), int(h", "result_list = list_candidate if filename_ext == \"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc:", "a folder or a file and it will be ignored\" ) if iterate", "file '\" + path + \" is not an HDF file with the", "if mrc.header.nz > 1: if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i])", "= [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images ] # [()] is used instead", "= [mrc.data[i] for i in list_images] elif len(list_images) == 1: data = [mrc.data]", "with h5py.File(path_to_file, driver=\"core\") as f: if isinstance(list_images, list) or isinstance(list_images, tuple): data =", "no flip :return: \"\"\" if t == 1: return np.flipud(img) elif t ==", ") if iterate is True: return getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check", "mrc file. It will be ignored\" ) if filename_ext == \"hdf\": try: with", "\" is not an HDF file with the following format:\\n\\t['MDF']['images']. It will be", "not an valid mrc file. 
It will be ignored\" ) if filename_ext ==", "the following: ['MDF']['images']['i']['image'] where i is a number representing the i-th images hence", "following images\") print(list_images) print(\"there are \" + str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\",", "\"WARNING in get_list_images: the file '\" + path + \" is not an", "rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "= [] with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as mrc: if isinstance(list_images, int): list_images =", "str(p) + \"' is not a folder or a file and it will", "the following images\") print(list_images) print(\"there are \" + str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in", "path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO", ":return: \"\"\" if isinstance(path_to_files, (list, tuple)): for p in path_to_files: if not path.isfile(p):", "images\") print(list_images) print(\"there are \" + str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\",", "i in list_images] elif len(list_images) == 1: data = [mrc.data] return data \"\"\"", "== \"hdf\": try: with h5py.File(path_to_file, driver=\"core\") as f: if isinstance(list_images, list) or isinstance(list_images,", "the resized img \"\"\" im = Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\"", "if isinstance(paths, str): paths = [paths] list_new_paths = list() iterate = False for", "if the given files are hdf/mrcs/st with a valid format. Return The list", "EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "file '\" + path + \" is not an valid mrc file. 
It", "list_images, tuple ): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images ] #", "\"\"\" It flip the image in function of the given typ :param img:", "except Exception as e: print(e) print( \"\\nERROR in getImages_fromList_key: the file '\" +", "associated documentation files (the \"Software\"), to deal in the Software without restriction, including", "== \"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: result_list = list(range(1)) except Exception", "\"hdf\": try: with h5py.File(path_to_file, 'r') as f: if isinstance(list_images, list) or isinstance( list_images,", "driver=\"core\") as f: if isinstance(list_images, list) or isinstance(list_images, tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()]", "type of the flip 1 --> flip over the row. Flipped array in", "mean and variance :param img: :return: \"\"\" import numpy as np # img", "+ \" is not an HDF file with the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you", "keys of the DB. It is the output( or part of its) given", "and the row (X and Y) otherwise --> no flip :return: \"\"\" if", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE", "paths :return: \"\"\" if isinstance(paths, str): paths = [paths] list_new_paths = list() iterate", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,", "the following format:\\n\\t['MDF']['images']. 
It will be ignored\" ) if len(list_candidate) > 0: result_list", "as f: if isinstance(list_images, list) or isinstance( list_images, tuple ): data = [", "tuple): if mrc.header.nz > 1: data = [mrc.data[i] for i in list_images] elif", "in list_images] elif len(list_images) == 1: data = [mrc.data] return data \"\"\" FUNCTION", "FUNCTION TO MANIPULATE THE IMAGES\"\"\" def apply_mask(img, mask): mean = np.mean(img) img[mask==False]=mean return", "str): return path.isfile(path_to_files) return True def calc_2d_spectra(img): from scipy import fftpack import numpy", ":return: \"\"\" print(\"Try to list images on\", path) import os filename_ext = os.path.basename(path).split(\".\")[-1]", "is not a folder or a file and it will be ignored\" )", "list/tuple of strings:\", type(list_images), ) print(\"you try to get the following images\") print(list_images)", "exit() except Exception as e: print(e) print( \"\\nERROR in getImages_fromList_key: the file '\"", "from 'get_list_images' :return: Returns a list of numpy arrays \"\"\" data = list()", "resized img \"\"\" im = Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize", "notice shall be included in all copies or substantial portions of the Software.", "\"' is not a folder or a file and it will be ignored\"", "normalize_img(img): \"\"\" normalize the images in base of its mean and variance :param", "images hence to get the images number 5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w,", ") print(\"you try to get the following images\") print(list_images) print(\"there are \" +", "\"\"\" return [ path_to_file for path_to_file in path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or", "import numpy as np F1 = fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D = np.abs(F2)", "img def resize_img(img, resize=(76, 76)): \"\"\" Resize the given image into the given", "list of paths :return: \"\"\" if 
isinstance(path_to_files, (list, tuple)): for p in path_to_files:", "len(list_images) == 1: data = np.nan_to_num(mrc.data) result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\"", "img: as numpy array :param resize: resize size :return: return the resized img", "== 1: data = np.nan_to_num(mrc.data) result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns", "is a number representing the i-th images hence to get the images number", ":param resize: resize size :return: return the resized img \"\"\" im = Image.fromarray(img)", "<= radius return mask def checkfiles(path_to_files): \"\"\" checks if the hdf files are", "2 + (Y - center[1]) ** 2) mask = dist_from_center <= radius return", "install pillow import numpy as np import mrcfile \"\"\" The format of the", "or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO READ THE HDF\"\"\" def get_key_list_images(path): \"\"\" Returns", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT", "copy of this software and associated documentation files (the \"Software\"), to deal in", "# use the smallest distance between the center and image walls radius =", "\"\"\" checks if the hdf files are in the correct path and returns", "> 0: result_list = list_candidate if filename_ext == \"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\")", "> 1: if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i]) for i", "result_list = list(range(1)) except Exception as e: print(e) print( \"WARNING in get_list_images: the", "list of paths :return: \"\"\" if isinstance(paths, str): paths = [paths] list_new_paths =", "substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "fftpack import numpy as np F1 = fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D =", "the row. 
Flipped array in up-down direction.(X) 2 --> flip over the column", "column and the row (X and Y) otherwise --> no flip :return: \"\"\"", "print(\"there are \" + str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]: data", "Copyright (c) 2019 <NAME> Institute of Molecular Physiology Permission is hereby granted, free", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION", "is the output( or part of its) given from 'get_list_images' :return: Returns a", "result_data = list() for path_to_file, list_images in file_index_tubles: data = list() if path.isfile(path_to_file):", "getImages_fromList_key: the file '\" + path_to_file + \" is not an HDF file", "if isinstance(list_images, int): list_images = [list_images] if isinstance(list_images, list) or isinstance(list_images, tuple): if", "selection tool. MIT License Copyright (c) 2019 <NAME> Institute of Molecular Physiology Permission", "return list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check if the given files are hdf/mrcs/st with", "numpy arrays \"\"\" data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try:", "DB. 
It is the output( or part of its) given from 'get_list_images' :return:", ":return: return the resized img \"\"\" im = Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def", "obtaining a copy of this software and associated documentation files (the \"Software\"), to", "list_new_paths.append(p) else: print( \"WARNING: The given path '\" + str(p) + \"' is", "the images in base of its mean and variance :param img: :return: \"\"\"", "or a file and it will be ignored\" ) if iterate is True:", "np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should be a string", "center is None: # use the middle of the image center = (int(w", "the center and image walls radius = min(center[0], center[1], w - center[0], h", "in base of its mean and variance :param img: :return: \"\"\" import numpy", "It is the output( or part of its) given from 'get_list_images' :return: Returns", "list) or isinstance(list_images, tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images ]", "copy=False) return img def flip_img(img, t=None): \"\"\" It flip the image in function", "v in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in get_list_images: the file '\" + path", "TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN", "format of the .hf file is the following: ['MDF']['images']['i']['image'] where i is a", "in {\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: list_candidate = [i for", "np.nan_to_num(mrc.data) result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the images in the", "images in base of its mean and variance :param img: :return: \"\"\" import", "listdir import h5py from PIL import Image # install it via pip install", "(X and Y) otherwise --> no flip :return: \"\"\" if t == 1:", "+ \" is not an HDF file with the following format:\\n\\t['MDF']['images']. It will", "data \"\"\" FUNCTION TO MANIPULATE THE IMAGES\"\"\" def apply_mask(img, mask): mean = np.mean(img)", "t=None): \"\"\" It flip the image in function of the given typ :param", "'\" + path + \" is not an valid mrc file. It will", "isinstance(path_to_files, (list, tuple)): for p in path_to_files: if not path.isfile(p): return False elif", "OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "on\", path) import os filename_ext = os.path.basename(path).split(\".\")[-1] result_list = None try: if filename_ext", "= dist_from_center <= radius return mask def checkfiles(path_to_files): \"\"\" checks if the hdf", "None try: if filename_ext in {\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc:", "= fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D = np.abs(F2) ** 2 return psd2D def", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR", "converted in list of integer :param path: :return: \"\"\" print(\"Try to list images", "> 0: result_list = list_candidate return result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns the images", "Returns a list of numpy arrays \"\"\" data = list() if path.isfile(path_to_file): if", "Y) otherwise --> no flip :return: \"\"\" if t == 1: return np.flipud(img)", "OTHER LIABILITY, WHETHER IN AN ACTION 
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "** 2 return psd2D def getList_files(paths): \"\"\" Returns the list of the valid", "an HDF file with the following format:\\n\\t['MDF']['images']. It will be ignored\" ) if", "1: data = [mrc.data] return data \"\"\" FUNCTION TO MANIPULATE THE IMAGES\"\"\" def", "--> no flip :return: \"\"\" if t == 1: return np.flipud(img) elif t", "publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons", "hdf \"\"\" return [ path_to_file for path_to_file in path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\")", "\"hdf\": try: with h5py.File(path_to_file, driver=\"core\") as f: if isinstance(list_images, list) or isinstance(list_images, tuple):", "including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,", "for p in paths: if path.isdir(p): iterate = True list_new_paths += [path.join(p, f)", "== \"hdf\": try: with h5py.File(path, \"r\") as f: list_candidate = [int(v) for v", "file_index_tubles: data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file,", "getList_files(paths): \"\"\" Returns the list of the valid hdf files in the given", "= np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i]) for i in list_images] elif len(list_images) ==", "getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the images in the hdf file (path_to_file) listed in", "isinstance(list_images, tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images ] # [()]", "or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", ":param paths: path or list of paths :return: \"\"\" if isinstance(paths, str): paths", "in right-left direction(Y) 3 --> flip over the column and the row (X", "path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, driver=\"core\") as f: if isinstance(list_images, list) or", "given files are hdf/mrcs/st with a valid format. Return The list of valid", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from os", "the given size :param img: as numpy array :param resize: resize size :return:", "'\" + str(p) + \"' is not a folder or a file and", "as np # img = img.astype(np.float64, copy=False) mean = np.mean(img) std = np.std(img)", "be converted in list of integer :param path: :return: \"\"\" print(\"Try to list", "center[0], h - center[1]) Y, X = np.ogrid[:h, :w] dist_from_center = np.sqrt((X -", "with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: result_list = list(range(1)) except Exception as e:", "len(list_candidate) > 0: result_list = list_candidate return result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns the", "list_candidate if filename_ext == \"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: result_list =", "will be ignored\" ) if filename_ext == \"hdf\": try: with h5py.File(path, \"r\") as", "h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO READ THE HDF\"\"\" def get_key_list_images(path): \"\"\" Returns the", "1: return np.flipud(img) elif t == 2: return np.fliplr(img) elif t == 3:", "psd2D def getList_files(paths): \"\"\" Returns the list of the valid hdf files in", "all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED \"AS", "SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "path_to_files: if not path.isfile(p): return False elif isinstance(path_to_files, str): return path.isfile(path_to_files) return True", ":param path: :return: \"\"\" print(\"Try to list images on\", path) import os filename_ext", "[mrc.data] return data \"\"\" FUNCTION TO MANIPULATE THE IMAGES\"\"\" def apply_mask(img, mask): mean", "path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, 'r') as f: if isinstance(list_images, list) or", "right-left direction(Y) 3 --> flip over the column and the row (X and", "License Copyright (c) 2019 <NAME> Institute of Molecular Physiology Permission is hereby granted,", "of its) given from 'get_list_images' :return: Returns a list of numpy arrays \"\"\"", "if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1: data = [mrc.data[i]", "for i in list_images ] # [()] is used instead of .value elif", "pip install pillow import numpy as np import mrcfile \"\"\" The format of", "= [int(v) for v in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in get_list_images: the file", "elif isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in getImages_fromList_key: invalid list_images,", "path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]: data = [] with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as", "mode=\"r\") as mrc: list_candidate = [i for i in range(mrc.header.nz)] if len(list_candidate) >", "with h5py.File(path, \"r\") as f: list_candidate = [int(v) for v in list(f[\"MDF\"][\"images\"])] except:", "print(e) print( \"\\nERROR in getImages_fromList_key: the file '\" + path_to_file + \" is", "h5py.File(path_to_file, driver=\"core\") as f: if isinstance(list_images, list) or isinstance(list_images, tuple): data = [", "the folder (and subfolder)given 
from the user :return: list of valid hdf \"\"\"", "import path, listdir import h5py from PIL import Image # install it via", "OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "\"WARNING: The given path '\" + str(p) + \"' is not a folder", "if len(list_candidate) > 0: result_list = list_candidate return result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns", "Flipped array in up-down direction.(X) 2 --> flip over the column Flipped array", "# img = img.astype(np.float32, copy=False) return img def flip_img(img, t=None): \"\"\" It flip", "1: data = [mrc.data[i] for i in list_images] elif len(list_images) == 1: data", "for i in range(mrc.header.nz)] if len(list_candidate) > 0: result_list = list_candidate if filename_ext", "in path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION", "\"st\"]: data = [] with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as mrc: if isinstance(list_images, int):", "= np.ogrid[:h, :w] dist_from_center = np.sqrt((X - center[0]) ** 2 + (Y -", "middle of the image center = (int(w / 2), int(h / 2)) if", "list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, 'r') as f:", "in list_images ] # [()] is used instead of .value elif isinstance(list_images, int):", "get_list_images: the file '\" + path + \" is not an valid mrc", "instead of .value elif isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in", "recursively :param paths: path or list of paths :return: \"\"\" if isinstance(paths, str):", "h - center[1]) Y, X = np.ogrid[:h, :w] dist_from_center = np.sqrt((X - center[0])", "result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the images in the hdf file (path_to_file)", ":return: Returns a list of numpy 
arrays \"\"\" data = list() if path.isfile(path_to_file):", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the", "list of keys of the DB. It is the output( or part of", "- center[0]) ** 2 + (Y - center[1]) ** 2) mask = dist_from_center", "numpy as np F1 = fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D = np.abs(F2) **", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN", "] \"\"\" FUNCTION TO READ THE HDF\"\"\" def get_key_list_images(path): \"\"\" Returns the list", "list of valid hdf :param path_to_files: list of all the files present in", "or isinstance(list_images, tuple): if mrc.header.nz > 1: data = [mrc.data[i] for i in", "via pip install pillow import numpy as np import mrcfile \"\"\" The format", "format:\\n\\t['MDF']['images']. It will be ignored\" ) if len(list_candidate) > 0: result_list = list_candidate", "ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "(list_images) :param path_to_file: path to hdf file :param list_images: list of keys of", "getImages_fromList_key: invalid list_images, it should be a string or a list/tuple of strings:\",", "== 1: return np.flipud(img) elif t == 2: return np.fliplr(img) elif t ==", "np.sqrt((X - center[0]) ** 2 + (Y - center[1]) ** 2) mask =", "center[0]) ** 2 + (Y - center[1]) ** 2) mask = dist_from_center <=", "+ str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]: data = [] with", "get_key_list_images(path): \"\"\" Returns the list of the keys representing the images in the", "OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL", "\"\"\" Returns the list of the valid hdf files in the given paths.", "print(\"you try to get the following images\") print(list_images) exit() except Exception as e:", "will be ignored\" ) if iterate is True: return getList_files(list_new_paths) return list_new_paths def", "from os import path, listdir import h5py from PIL import Image # install", "True list_new_paths += [path.join(p, f) for f in listdir(p)] elif path.isfile(p): list_new_paths.append(p) else:", "for path_to_file in path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ]", "the file '\" + path_to_file + \" is not an HDF file with", "above copyright notice and this permission notice shall be included in all copies", "return result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the images in the hdf file", "direction(Y) 3 --> flip over the column and the row (X and Y)", "path_to_file for path_to_file in path_to_files if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file)", "isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it", "a number representing the i-th images hence to get the images number 5:", "path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, 'r') as f: if isinstance(list_images,", "valid hdf \"\"\" return [ path_to_file for path_to_file in path_to_files if path_to_file.endswith(\"mrcs\") or", "list_images: list of keys of the DB. 
It is the output( or part", "and image walls radius = min(center[0], center[1], w - center[0], h - center[1])", "data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images ] # [()] is used", "= np.mean(img) img[mask==False]=mean return img def resize_img(img, resize=(76, 76)): \"\"\" Resize the given", "following images\") print(list_images) exit() except Exception as e: print(e) print( \"\\nERROR in getImages_fromList_key:", "WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "std = np.std(img) img = (img - mean) / (std+0.00001) # img =", "= np.sqrt((X - center[0]) ** 2 + (Y - center[1]) ** 2) mask", "to list images on\", path) import os filename_ext = os.path.basename(path).split(\".\")[-1] result_list = None", "os filename_ext = os.path.basename(path).split(\".\")[-1] result_list = None try: if filename_ext in {\"mrcs\", \"st\"}:", "getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check if the given files are hdf/mrcs/st", "def calc_2d_spectra(img): from scipy import fftpack import numpy as np F1 = fftpack.fft2(img)", "OTHER DEALINGS IN THE SOFTWARE. \"\"\" from os import path, listdir import h5py", "the images number 5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w, center=None, radius=None): if center", "= None try: if filename_ext in {\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\") as", ":return: \"\"\" import numpy as np # img = img.astype(np.float64, copy=False) mean =", "return img def flip_img(img, t=None): \"\"\" It flip the image in function of", "is not an valid mrc file. It will be ignored\" ) if filename_ext", "the hdf file (path_to_file) listed in (list_images) :param path_to_file: path to hdf file", "PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS", "permission notice shall be included in all copies or substantial portions of the", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS", "if iterate is True: return getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check if", "list_images = [list_images] if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1:", "in the correct path and returns True if all of them exists :param", "and variance :param img: :return: \"\"\" import numpy as np # img =", "# [()] is used instead of .value elif isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()])", "as e: print(e) print( \"\\nERROR in getImages_fromList_key: the file '\" + path_to_file +", "elif len(list_images) == 1: data = [mrc.data] return data \"\"\" FUNCTION TO MANIPULATE", "array in right-left direction(Y) 3 --> flip over the column and the row", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH", "PIL import Image # install it via pip install pillow import numpy as", "fftpack.fftshift(F1) psd2D = np.abs(F2) ** 2 return psd2D def getList_files(paths): \"\"\" Returns the", "import numpy as np import mrcfile \"\"\" The format of the .hf file", "\"\"\" FUNCTION TO MANIPULATE THE IMAGES\"\"\" def apply_mask(img, mask): mean = np.mean(img) img[mask==False]=mean", "the column Flipped array in right-left direction(Y) 3 --> flip over the column", "the output( or part of its) given from 'get_list_images' :return: Returns a list", "return psd2D def getList_files(paths): \"\"\" Returns the list of the valid hdf files", "Image # install it via pip install pillow import numpy as np import", "valid format. 
Return The list of valid hdf :param path_to_files: list of all", "== \"hdf\": try: with h5py.File(path_to_file, 'r') as f: if isinstance(list_images, list) or isinstance(", "if center is None: # use the middle of the image center =", "the following conditions: The above copyright notice and this permission notice shall be", "= np.nan_to_num(mrc.data) result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the images in", "if mrc.header.nz > 1: data = [mrc.data[i] for i in list_images] elif len(list_images)", "files are hdf/mrcs/st with a valid format. Return The list of valid hdf", "Y, X = np.ogrid[:h, :w] dist_from_center = np.sqrt((X - center[0]) ** 2 +", "representing the images in the hdf/mrcs/st file. It will be converted in list", ".hf file is the following: ['MDF']['images']['i']['image'] where i is a number representing the", "paths: path or list of paths :return: \"\"\" if isinstance(paths, str): paths =", "mrc: result_list = list(range(1)) except Exception as e: print(e) print( \"WARNING in get_list_images:", "center[1]) ** 2) mask = dist_from_center <= radius return mask def checkfiles(path_to_files): \"\"\"", "= [mrc.data] return data \"\"\" FUNCTION TO MANIPULATE THE IMAGES\"\"\" def apply_mask(img, mask):", "img: :param t: type of the flip 1 --> flip over the row.", "use the smallest distance between the center and image walls radius = min(center[0],", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT", "import Image # install it via pip install pillow import numpy as np", "f: if isinstance(list_images, list) or isinstance( list_images, tuple ): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()])", "furnished to do so, subject to the following conditions: The above copyright notice", "dist_from_center = np.sqrt((X - center[0]) ** 2 + (Y - center[1]) ** 2)", "- center[1]) Y, X = np.ogrid[:h, :w] dist_from_center = np.sqrt((X - center[0]) **", 
"getList_relevant_files(path_to_files): \"\"\" Check if the given files are hdf/mrcs/st with a valid format.", "driver=\"core\" result_data = list() for path_to_file, list_images in file_index_tubles: data = list() if", "of the keys representing the images in the hdf/mrcs/st file. It will be", "1 --> flip over the row. Flipped array in up-down direction.(X) 2 -->", "[path.join(p, f) for f in listdir(p)] elif path.isfile(p): list_new_paths.append(p) else: print( \"WARNING: The", "permit persons to whom the Software is furnished to do so, subject to", "try: with h5py.File(path_to_file, 'r') as f: if isinstance(list_images, list) or isinstance( list_images, tuple", "= (int(w / 2), int(h / 2)) if radius is None: # use", "(c) 2019 <NAME> Institute of Molecular Physiology Permission is hereby granted, free of", "any person obtaining a copy of this software and associated documentation files (the", "if isinstance(path_to_files, (list, tuple)): for p in path_to_files: if not path.isfile(p): return False", "file. It will be ignored\" ) if filename_ext == \"hdf\": try: with h5py.File(path,", "+= [path.join(p, f) for f in listdir(p)] elif path.isfile(p): list_new_paths.append(p) else: print( \"WARNING:", "mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as mrc: if isinstance(list_images, int): list_images = [list_images] if isinstance(list_images,", "list of the valid hdf files in the given paths. 
It is called", "all the files present in the folder (and subfolder)given from the user :return:", "type(list_images), ) print(\"you try to get the following images\") print(list_images) exit() except Exception", "isinstance(list_images, tuple): if mrc.header.nz > 1: data = [mrc.data[i] for i in list_images]", "copies of the Software, and to permit persons to whom the Software is", "for f in listdir(p)] elif path.isfile(p): list_new_paths.append(p) else: print( \"WARNING: The given path", "of paths :return: \"\"\" if isinstance(path_to_files, (list, tuple)): for p in path_to_files: if", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", ") if len(list_candidate) > 0: result_list = list_candidate return result_list def getImages_fromList_key(file_index_tubles): \"\"\"", "img = (img - mean) / (std+0.00001) # img = img.astype(np.float32, copy=False) return", "included in all copies or substantial portions of the Software. THE SOFTWARE IS", "= list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, 'r') as", "int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should", "function of the given typ :param img: :param t: type of the flip", "np # img = img.astype(np.float64, copy=False) mean = np.mean(img) std = np.std(img) img", "def checkfiles(path_to_files): \"\"\" checks if the hdf files are in the correct path", "image walls radius = min(center[0], center[1], w - center[0], h - center[1]) Y,", "of .value elif isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in getImages_fromList_key:", "exists :param path_to_files: list of paths :return: \"\"\" if isinstance(path_to_files, (list, tuple)): for", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, 
and", "file with the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to get the following images\")", "are \" + str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]: data =", "used instead of .value elif isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR", "f: list_candidate = [int(v) for v in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in get_list_images:", "file (path_to_file) listed in (list_images) :param path_to_file: path to hdf file :param list_images:", "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "iterate is True: return getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check if the", "mask = dist_from_center <= radius return mask def checkfiles(path_to_files): \"\"\" checks if the", "filename_ext == \"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: result_list = list(range(1)) except", "(Y - center[1]) ** 2) mask = dist_from_center <= radius return mask def", "+ \" is not an valid mrc file. It will be ignored\" )", "= False for p in paths: if path.isdir(p): iterate = True list_new_paths +=", "the list of the keys representing the images in the hdf/mrcs/st file. It", "elif t == 2: return np.fliplr(img) elif t == 3: return np.flipud(np.fliplr(img)) return", "2) mask = dist_from_center <= radius return mask def checkfiles(path_to_files): \"\"\" checks if", "the Software, and to permit persons to whom the Software is furnished to", "its mean and variance :param img: :return: \"\"\" import numpy as np #", "return getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check if the given files are", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN", "t: type of the flip 1 --> flip over the row. 
Flipped array", "= True list_new_paths += [path.join(p, f) for f in listdir(p)] elif path.isfile(p): list_new_paths.append(p)", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,", "result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the images in the hdf", "following conditions: The above copyright notice and this permission notice shall be included", "list_new_paths += [path.join(p, f) for f in listdir(p)] elif path.isfile(p): list_new_paths.append(p) else: print(", "def get_key_list_images(path): \"\"\" Returns the list of the keys representing the images in", "center[1], w - center[0], h - center[1]) Y, X = np.ogrid[:h, :w] dist_from_center", "except: print( \"WARNING in get_list_images: the file '\" + path + \" is", "if t == 1: return np.flipud(img) elif t == 2: return np.fliplr(img) elif", "the hdf/mrcs/st file. It will be converted in list of integer :param path:", "copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\",", "format. Return The list of valid hdf :param path_to_files: list of all the", "= [paths] list_new_paths = list() iterate = False for p in paths: if", "data = np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i]) for i in list_images] elif len(list_images)", "isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it", "NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "the middle of the image center = (int(w / 2), int(h / 2))", "a string or a list/tuple of strings:\", type(list_images), ) print(\"you try to get", "THE IMAGES\"\"\" def apply_mask(img, mask): mean = np.mean(img) img[mask==False]=mean return img def resize_img(img,", "= (img - mean) / (std+0.00001) # img = img.astype(np.float32, copy=False) return img", "Return The list of valid hdf :param path_to_files: list of all the files", ") print(\"you try to get the following images\") print(list_images) exit() except Exception as", "The above copyright notice and this permission notice shall be included in all", "w - center[0], h - center[1]) Y, X = np.ogrid[:h, :w] dist_from_center =", "list_images): \"\"\" Returns the images in the hdf file (path_to_file) listed in (list_images)", "MIT License Copyright (c) 2019 <NAME> Institute of Molecular Physiology Permission is hereby", "THE HDF\"\"\" def get_key_list_images(path): \"\"\" Returns the list of the keys representing the", "to get the following images\") print(list_images) exit() except Exception as e: print(e) print(", "] # [()] is used instead of .value elif isinstance(list_images, int): data =", "\"\"\" from os import path, listdir import h5py from PIL import Image #", "numpy as np import mrcfile \"\"\" The format of the .hf file is", "\"\"\" # driver=\"core\" result_data = list() for path_to_file, list_images in file_index_tubles: data =", "following: ['MDF']['images']['i']['image'] where i is a number representing the i-th images hence to", "path and returns True if all of them exists :param path_to_files: list of", "= np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should be a", "resize=(76, 76)): \"\"\" Resize the given image into the given size :param img:", "img.astype(np.float64, copy=False) mean = np.mean(img) std = np.std(img) img = (img - 
mean)", "'\" + path_to_file + \" is not an HDF file with the following", "if filename_ext == \"hdf\": try: with h5py.File(path, \"r\") as f: list_candidate = [int(v)", "copy=False) mean = np.mean(img) std = np.std(img) img = (img - mean) /", "\"Software\"), to deal in the Software without restriction, including without limitation the rights", "\"r\") as f: list_candidate = [int(v) for v in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING", "= f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should be a", "deal in the Software without restriction, including without limitation the rights to use,", "files are in the correct path and returns True if all of them", "psd2D = np.abs(F2) ** 2 return psd2D def getList_files(paths): \"\"\" Returns the list", "following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to get the following images\") print(list_images) print(\"there are", "granted, free of charge, to any person obtaining a copy of this software", "limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]: data = [] with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\")", "mode=\"r\") as mrc: result_list = list(range(1)) except Exception as e: print(e) print( \"WARNING", "The format of the .hf file is the following: ['MDF']['images']['i']['image'] where i is", "calc_2d_spectra(img): from scipy import fftpack import numpy as np F1 = fftpack.fft2(img) F2", "string or a list/tuple of strings:\", type(list_images), ) print(\"you try to get the", "int): list_images = [list_images] if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz >", "def create_circular_mask(h, w, center=None, radius=None): if center is None: # use the middle", "mrc: list_candidate = [i for i in range(mrc.header.nz)] if 
len(list_candidate) > 0: result_list", "Molecular Physiology Permission is hereby granted, free of charge, to any person obtaining", "if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i]) for i in list_images]", "valid hdf :param path_to_files: list of all the files present in the folder", "X = np.ogrid[:h, :w] dist_from_center = np.sqrt((X - center[0]) ** 2 + (Y", "isinstance(path_to_files, str): return path.isfile(path_to_files) return True def calc_2d_spectra(img): from scipy import fftpack import", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "(path_to_file) listed in (list_images) :param path_to_file: path to hdf file :param list_images: list", "file :param list_images: list of keys of the DB. It is the output(", "data = np.nan_to_num(mrc.data) result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the images", "if isinstance(list_images, list) or isinstance(list_images, tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in", "[list_images] if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1: if len(list_images)==1:", "of paths :return: \"\"\" if isinstance(paths, str): paths = [paths] list_new_paths = list()", "ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "list_new_paths = list() iterate = False for p in paths: if path.isdir(p): iterate", "it will be ignored\" ) if iterate is True: return getList_files(list_new_paths) return list_new_paths", "of this software and associated documentation files (the \"Software\"), to deal in the", "if filename_ext == \"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: result_list = list(range(1))", "len(list_images) == 1: data = [mrc.data] return data \"\"\" FUNCTION TO MANIPULATE THE", "in file_index_tubles: data = list() if path.isfile(path_to_file): if 
path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO", "in paths: if path.isdir(p): iterate = True list_new_paths += [path.join(p, f) for f", "images in the hdf file (path_to_file) listed in (list_images) :param path_to_file: path to", "or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO READ THE HDF\"\"\" def get_key_list_images(path):", "def getList_relevant_files(path_to_files): \"\"\" Check if the given files are hdf/mrcs/st with a valid", "list() for path_to_file, list_images in file_index_tubles: data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1]", "sell copies of the Software, and to permit persons to whom the Software", "be ignored\" ) if iterate is True: return getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files):", "as f: list_candidate = [int(v) for v in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in", "list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, driver=\"core\") as f:", "with a valid format. Return The list of valid hdf :param path_to_files: list", "img[mask==False]=mean return img def resize_img(img, resize=(76, 76)): \"\"\" Resize the given image into", "np.mean(img) std = np.std(img) img = (img - mean) / (std+0.00001) # img", "try: with h5py.File(path, \"r\") as f: list_candidate = [int(v) for v in list(f[\"MDF\"][\"images\"])]", "or list of paths :return: \"\"\" if isinstance(paths, str): paths = [paths] list_new_paths", "of keys of the DB. 
It is the output( or part of its)", "folder or a file and it will be ignored\" ) if iterate is", "np.flipud(img) elif t == 2: return np.fliplr(img) elif t == 3: return np.flipud(np.fliplr(img))", "of integer :param path: :return: \"\"\" print(\"Try to list images on\", path) import", "import os filename_ext = os.path.basename(path).split(\".\")[-1] result_list = None try: if filename_ext in {\"mrcs\",", "flip_img(img, t=None): \"\"\" It flip the image in function of the given typ", "path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO READ THE", "with the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to get the following images\") print(list_images)", "ignored\" ) if iterate is True: return getList_files(list_new_paths) return list_new_paths def getList_relevant_files(path_to_files): \"\"\"", "2)) if radius is None: # use the smallest distance between the center", "size :return: return the resized img \"\"\" im = Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR))", "None: # use the smallest distance between the center and image walls radius", "str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]: data = [] with mrcfile.mmap(path_to_file,", "do so, subject to the following conditions: The above copyright notice and this", "np import mrcfile \"\"\" The format of the .hf file is the following:", "array :param resize: resize size :return: return the resized img \"\"\" im =", "walls radius = min(center[0], center[1], w - center[0], h - center[1]) Y, X", "is the following: ['MDF']['images']['i']['image'] where i is a number representing the i-th images", "center[1]) Y, X = np.ogrid[:h, :w] dist_from_center = np.sqrt((X - center[0]) ** 2", "= os.path.basename(path).split(\".\")[-1] result_list = None try: if filename_ext in {\"mrcs\", \"st\"}: with 
mrcfile.mmap(path,", "listdir(p)] elif path.isfile(p): list_new_paths.append(p) else: print( \"WARNING: The given path '\" + str(p)", "center=None, radius=None): if center is None: # use the middle of the image", "given typ :param img: :param t: type of the flip 1 --> flip", "the file '\" + path + \" is not an valid mrc file.", "is furnished to do so, subject to the following conditions: The above copyright", "result_list = list_candidate return result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns the images in the", "The given path '\" + str(p) + \"' is not a folder or", "data = [np.nan_to_num(mrc.data[i]) for i in list_images] elif len(list_images) == 1: data =", "--> flip over the row. Flipped array in up-down direction.(X) 2 --> flip", "data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, 'r')", "** 2 + (Y - center[1]) ** 2) mask = dist_from_center <= radius", "the images in the hdf/mrcs/st file. It will be converted in list of", "following format:\\n\\t['MDF']['images']. 
It will be ignored\" ) if len(list_candidate) > 0: result_list =", "None: # use the middle of the image center = (int(w / 2),", "the hdf files are in the correct path and returns True if all", "+ str(p) + \"' is not a folder or a file and it", "2), int(h / 2)) if radius is None: # use the smallest distance", "tuple)): for p in path_to_files: if not path.isfile(p): return False elif isinstance(path_to_files, str):", "isinstance(paths, str): paths = [paths] list_new_paths = list() iterate = False for p", "will be converted in list of integer :param path: :return: \"\"\" print(\"Try to", "is used instead of .value elif isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print(", "listed in (list_images) :param path_to_file: path to hdf file :param list_images: list of", "files present in the folder (and subfolder)given from the user :return: list of", "= img.astype(np.float32, copy=False) return img def flip_img(img, t=None): \"\"\" It flip the image", "so, subject to the following conditions: The above copyright notice and this permission", "if the hdf files are in the correct path and returns True if", "in getImages_fromList_key: the file '\" + path_to_file + \" is not an HDF", "Flipped array in right-left direction(Y) 3 --> flip over the column and the", "list_images in file_index_tubles: data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try:", "from 'get_list_images' :return: Returns a list of numpy arrays \"\"\" # driver=\"core\" result_data", "tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images ] # [()] is", "as e: print(e) print( \"WARNING in get_list_images: the file '\" + path +", "paths = [paths] list_new_paths = list() iterate = False for p in paths:", "in list of integer :param path: :return: \"\"\" print(\"Try to list images on\",", "returns True if all of them exists :param path_to_files: list of paths :return:", "2019 
<NAME> Institute of Molecular Physiology Permission is hereby granted, free of charge,", "path '\" + str(p) + \"' is not a folder or a file", "and it will be ignored\" ) if iterate is True: return getList_files(list_new_paths) return", "= [list_images] if isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1: if", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR", "the image center = (int(w / 2), int(h / 2)) if radius is", "+ path_to_file + \" is not an HDF file with the following format:\\n\\t['MDF']['images']['0']['image']\"", "h5py.File(path_to_file, 'r') as f: if isinstance(list_images, list) or isinstance( list_images, tuple ): data", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR", "dist_from_center <= radius return mask def checkfiles(path_to_files): \"\"\" checks if the hdf files", "f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images ] # [()] is used instead of .value", "of the .hf file is the following: ['MDF']['images']['i']['image'] where i is a number", "path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO READ THE HDF\"\"\" def get_key_list_images(path): \"\"\"", "return img def resize_img(img, resize=(76, 76)): \"\"\" Resize the given image into the", "of the given typ :param img: :param t: type of the flip 1", "of the Software, and to permit persons to whom the Software is furnished", "the image in function of the given typ :param img: :param t: type", "and/or sell copies of the Software, and to permit persons to whom the", "in (list_images) :param path_to_file: path to hdf file :param list_images: list of keys", "resize_img(img, resize=(76, 76)): \"\"\" Resize the given image into the given size :param", "i in list_images] elif len(list_images) == 1: data = np.nan_to_num(mrc.data) result_data.append(data) return result_data", "w, center=None, radius=None): if center is None: # use the middle of the", 
"The list of valid hdf :param path_to_files: list of all the files present", "= img.astype(np.float64, copy=False) mean = np.mean(img) std = np.std(img) img = (img -", "It will be ignored\" ) if len(list_candidate) > 0: result_list = list_candidate return", "print(\"Try to list images on\", path) import os filename_ext = os.path.basename(path).split(\".\")[-1] result_list =", "HDF\"\"\" def get_key_list_images(path): \"\"\" Returns the list of the keys representing the images", "of charge, to any person obtaining a copy of this software and associated", "(the \"Software\"), to deal in the Software without restriction, including without limitation the", "Returns the list of the keys representing the images in the hdf/mrcs/st file.", "print(list_images) exit() except Exception as e: print(e) print( \"\\nERROR in getImages_fromList_key: the file", "flip :return: \"\"\" if t == 1: return np.flipud(img) elif t == 2:", "get the following images\") print(list_images) print(\"there are \" + str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1]", "It is called recursively :param paths: path or list of paths :return: \"\"\"", "- center[0], h - center[1]) Y, X = np.ogrid[:h, :w] dist_from_center = np.sqrt((X", "will be ignored\" ) if len(list_candidate) > 0: result_list = list_candidate return result_list", "size :param img: as numpy array :param resize: resize size :return: return the", "copyright notice and this permission notice shall be included in all copies or", "\"mrcs\", \"st\"]: data = [] with mrcfile.mmap(path_to_file, permissive=True, mode=\"r\") as mrc: if isinstance(list_images,", "resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize the images in base of its mean and", "of its mean and variance :param img: :return: \"\"\" import numpy as np", "mean = np.mean(img) std = np.std(img) img = (img - mean) / (std+0.00001)", "in [\"mrc\", \"mrcs\", \"st\"]: data = [] with mrcfile.mmap(path_to_file, permissive=True, 
mode=\"r\") as mrc:", "- center[1]) ** 2) mask = dist_from_center <= radius return mask def checkfiles(path_to_files):", "for p in path_to_files: if not path.isfile(p): return False elif isinstance(path_to_files, str): return", "to permit persons to whom the Software is furnished to do so, subject", "list_new_paths def getList_relevant_files(path_to_files): \"\"\" Check if the given files are hdf/mrcs/st with a", "--> flip over the column and the row (X and Y) otherwise -->", "as numpy array :param resize: resize size :return: return the resized img \"\"\"", "in the hdf/mrcs/st file. It will be converted in list of integer :param", "permissive=True, mode=\"r\") as mrc: result_list = list(range(1)) except Exception as e: print(e) print(", "\"\"\" The format of the .hf file is the following: ['MDF']['images']['i']['image'] where i", "the following images\") print(list_images) exit() except Exception as e: print(e) print( \"\\nERROR in", "0: result_list = list_candidate if filename_ext == \"mrc\": with mrcfile.mmap(path, permissive=True, mode=\"r\") as", "in get_list_images: the file '\" + path + \" is not an valid", "+ path + \" is not an HDF file with the following format:\\n\\t['MDF']['images'].", "> 1: data = [mrc.data[i] for i in list_images] elif len(list_images) == 1:", "flip the image in function of the given typ :param img: :param t:", "smallest distance between the center and image walls radius = min(center[0], center[1], w", "data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, driver=\"core\")", "img = img.astype(np.float32, copy=False) return img def flip_img(img, t=None): \"\"\" It flip the", ":param img: :return: \"\"\" import numpy as np # img = img.astype(np.float64, copy=False)", "conditions: The above copyright notice and this permission notice shall be included in", "called recursively :param paths: path or list of paths :return: \"\"\" if isinstance(paths,", "if 
all of them exists :param path_to_files: list of paths :return: \"\"\" if", "e: print(e) print( \"WARNING in get_list_images: the file '\" + path + \"", "path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file, driver=\"core\") as f: if isinstance(list_images,", "return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize the images in base of its", "list of the keys representing the images in the hdf/mrcs/st file. It will", "THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "import h5py from PIL import Image # install it via pip install pillow", "\"\"\" FUNCTION TO READ THE HDF\"\"\" def get_key_list_images(path): \"\"\" Returns the list of", "f) for f in listdir(p)] elif path.isfile(p): list_new_paths.append(p) else: print( \"WARNING: The given", "all of them exists :param path_to_files: list of paths :return: \"\"\" if isinstance(path_to_files,", "with h5py.File(path_to_file, 'r') as f: if isinstance(list_images, list) or isinstance( list_images, tuple ):", "len(list_candidate) > 0: result_list = list_candidate if filename_ext == \"mrc\": with mrcfile.mmap(path, permissive=True,", "list of integer :param path: :return: \"\"\" print(\"Try to list images on\", path)", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER", "Permission is hereby granted, free of charge, to any person obtaining a copy", "the given paths. It is called recursively :param paths: path or list of", "be included in all copies or substantial portions of the Software. 
THE SOFTWARE", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "or a list/tuple of strings:\", type(list_images), ) print(\"you try to get the following", "file '\" + path_to_file + \" is not an HDF file with the", "its) given from 'get_list_images' :return: Returns a list of numpy arrays \"\"\" data", "for i in list_images] elif len(list_images) == 1: data = [mrc.data] return data", "2 --> flip over the column Flipped array in right-left direction(Y) 3 -->", "as f: if isinstance(list_images, list) or isinstance(list_images, tuple): data = [ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for", "of the image center = (int(w / 2), int(h / 2)) if radius", "whom the Software is furnished to do so, subject to the following conditions:", "['MDF']['images']['i']['image'] where i is a number representing the i-th images hence to get", "\"\"\" Returns the list of the keys representing the images in the hdf/mrcs/st", "the given files are hdf/mrcs/st with a valid format. Return The list of", "It flip the image in function of the given typ :param img: :param", "given path '\" + str(p) + \"' is not a folder or a", "it via pip install pillow import numpy as np import mrcfile \"\"\" The", "mean = np.mean(img) img[mask==False]=mean return img def resize_img(img, resize=(76, 76)): \"\"\" Resize the", "if path_to_file.endswith(\"mrcs\") or path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO READ", "1: if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i]) for i in", "path_to_files: list of paths :return: \"\"\" if isinstance(path_to_files, (list, tuple)): for p in", "in range(mrc.header.nz)] if len(list_candidate) > 0: result_list = list_candidate if filename_ext == \"mrc\":", "file and it will be ignored\" ) if iterate is True: return getList_files(list_new_paths)", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\"\"\"", "a list of numpy arrays \"\"\" data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1]", "import numpy as np # img = img.astype(np.float64, copy=False) mean = np.mean(img) std", "[()] is used instead of .value elif isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else:", "create_circular_mask(h, w, center=None, radius=None): if center is None: # use the middle of", "if path.isdir(p): iterate = True list_new_paths += [path.join(p, f) for f in listdir(p)]", "FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", ":param path_to_files: list of all the files present in the folder (and subfolder)given", "hdf file :param list_images: list of keys of the DB. It is the", "\"\"\" im = Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize the images", "'\" + path + \" is not an HDF file with the following", "user :return: list of valid hdf \"\"\" return [ path_to_file for path_to_file in", "\"\"\" if isinstance(paths, str): paths = [paths] list_new_paths = list() iterate = False", "mean) / (std+0.00001) # img = img.astype(np.float32, copy=False) return img def flip_img(img, t=None):", "is not an HDF file with the following format:\\n\\t['MDF']['images']. It will be ignored\"", "radius is None: # use the smallest distance between the center and image", "for i in list_images] elif len(list_images) == 1: data = np.nan_to_num(mrc.data) result_data.append(data) return", "portions of the Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "path_to_files: list of all the files present in the folder (and subfolder)given from", "\"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: list_candidate = [i for i in", "np.std(img) img = (img - mean) / (std+0.00001) # img = img.astype(np.float32, copy=False)", "row (X and Y) otherwise --> no flip :return: \"\"\" if t ==", "F1 = fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D = np.abs(F2) ** 2 return psd2D", "= list(range(1)) except Exception as e: print(e) print( \"WARNING in get_list_images: the file", "print(list_images) print(\"there are \" + str(len(f[\"MDF\"][\"images\"]))) exit() elif path.basename(path_to_file).split(\".\")[-1] in [\"mrc\", \"mrcs\", \"st\"]:", "p in paths: if path.isdir(p): iterate = True list_new_paths += [path.join(p, f) for", "used instead of .value elif isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR", "to get the images number 5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w, center=None, radius=None):", "numpy as np # img = img.astype(np.float64, copy=False) mean = np.mean(img) std =", "over the row. Flipped array in up-down direction.(X) 2 --> flip over the", "variance :param img: :return: \"\"\" import numpy as np # img = img.astype(np.float64,", "class selection tool. MIT License Copyright (c) 2019 <NAME> Institute of Molecular Physiology", "+ \"' is not a folder or a file and it will be", "DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "a list/tuple of strings:\", type(list_images), ) print(\"you try to get the following images\")", "radius return mask def checkfiles(path_to_files): \"\"\" checks if the hdf files are in", "pillow import numpy as np import mrcfile \"\"\" The format of the .hf", "distribute, sublicense, and/or sell copies of the Software, and to permit persons to", "of the Software. 
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w, center=None, radius=None): if center is None: #", "software and associated documentation files (the \"Software\"), to deal in the Software without", "iterate = False for p in paths: if path.isdir(p): iterate = True list_new_paths", "Returns a list of numpy arrays \"\"\" # driver=\"core\" result_data = list() for", "path.isfile(p): list_new_paths.append(p) else: print( \"WARNING: The given path '\" + str(p) + \"'", "/ 2), int(h / 2)) if radius is None: # use the smallest", "strings:\", type(list_images), ) print(\"you try to get the following images\") print(list_images) exit() except", "Exception as e: print(e) print( \"\\nERROR in getImages_fromList_key: the file '\" + path_to_file", "of the valid hdf files in the given paths. It is called recursively", "the valid hdf files in the given paths. It is called recursively :param", "int(h / 2)) if radius is None: # use the smallest distance between", "of them exists :param path_to_files: list of paths :return: \"\"\" if isinstance(path_to_files, (list,", "normalize the images in base of its mean and variance :param img: :return:", "path + \" is not an valid mrc file. It will be ignored\"", "image into the given size :param img: as numpy array :param resize: resize", "hdf/mrcs/st with a valid format. Return The list of valid hdf :param path_to_files:", "shall be included in all copies or substantial portions of the Software. THE", "def resize_img(img, resize=(76, 76)): \"\"\" Resize the given image into the given size", "isinstance(list_images, list) or isinstance(list_images, tuple): if mrc.header.nz > 1: data = [mrc.data[i] for", "\"\"\" Check if the given files are hdf/mrcs/st with a valid format. 
Return", "READ THE HDF\"\"\" def get_key_list_images(path): \"\"\" Returns the list of the keys representing", "path, listdir import h5py from PIL import Image # install it via pip", "in the given paths. It is called recursively :param paths: path or list", "the images in the hdf file (path_to_file) listed in (list_images) :param path_to_file: path", "in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in get_list_images: the file '\" + path +", "NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "subfolder)given from the user :return: list of valid hdf \"\"\" return [ path_to_file", "'get_list_images' :return: Returns a list of numpy arrays \"\"\" # driver=\"core\" result_data =", "# [()] is used instead of .value elif isinstance(list_images, int): data = f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]", "hence to get the images number 5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h, w, center=None,", "are in the correct path and returns True if all of them exists", "range(mrc.header.nz)] if len(list_candidate) > 0: result_list = list_candidate if filename_ext == \"mrc\": with", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "(int(w / 2), int(h / 2)) if radius is None: # use the", "is None: # use the middle of the image center = (int(w /", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "if radius is None: # use the smallest distance between the center and", "= Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize the images in base", "hdf files are in the correct path and returns True if all of", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT", "t == 2: return np.fliplr(img) elif t == 3: return np.flipud(np.fliplr(img)) return img", "checkfiles(path_to_files): \"\"\" checks if the hdf files are in the correct path and", 
"def apply_mask(img, mask): mean = np.mean(img) img[mask==False]=mean return img def resize_img(img, resize=(76, 76)):", "= np.abs(F2) ** 2 return psd2D def getList_files(paths): \"\"\" Returns the list of", "+ (Y - center[1]) ** 2) mask = dist_from_center <= radius return mask", "the Software is furnished to do so, subject to the following conditions: The", "list) or isinstance( list_images, tuple ): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in", "\"\"\" if t == 1: return np.flipud(img) elif t == 2: return np.fliplr(img)", "a list of numpy arrays \"\"\" # driver=\"core\" result_data = list() for path_to_file,", "[ f[\"MDF\"][\"images\"][str(i)][\"image\"][()] for i in list_images ] # [()] is used instead of", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING", "the .hf file is the following: ['MDF']['images']['i']['image'] where i is a number representing", "path_to_file.endswith(\"mrc\") or path_to_file.endswith(\"st\") or h5py.is_hdf5(path_to_file) ] \"\"\" FUNCTION TO READ THE HDF\"\"\" def", "img \"\"\" im = Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize the", "path_to_file, list_images in file_index_tubles: data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\":", "typ :param img: :param t: type of the flip 1 --> flip over", "the files present in the folder (and subfolder)given from the user :return: list", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT", "the i-th images hence to get the images number 5: ['MDF']['images']['5']['image'][()] \"\"\" def", "path: :return: \"\"\" print(\"Try to list images on\", path) import os filename_ext =", "else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should be a string or", "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS", "the keys representing the images in the hdf/mrcs/st file. It will be converted", "mrcfile \"\"\" The format of the .hf file is the following: ['MDF']['images']['i']['image'] where", "radius=None): if center is None: # use the middle of the image center", "images in the hdf/mrcs/st file. It will be converted in list of integer", "list_candidate = [i for i in range(mrc.header.nz)] if len(list_candidate) > 0: result_list =", "list) or isinstance(list_images, tuple): if mrc.header.nz > 1: if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]])", "Physiology Permission is hereby granted, free of charge, to any person obtaining a", "part of its) given from 'get_list_images' :return: Returns a list of numpy arrays", "subject to the following conditions: The above copyright notice and this permission notice", "PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE", "2 return psd2D def getList_files(paths): \"\"\" Returns the list of the valid hdf", "else: print( \"WARNING: The given path '\" + str(p) + \"' is not", "format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to get the following images\") print(list_images) print(\"there are \"", "as np import mrcfile \"\"\" The format of the .hf file is the", "or isinstance( list_images, tuple ): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i in list_images", "elif isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in getImages_fromList_key: invalid list_images,", "an valid mrc file. It will be ignored\" ) if filename_ext == \"hdf\":", "list_images, it should be a string or a list/tuple of strings:\", type(list_images), )", "install it via pip install pillow import numpy as np import mrcfile \"\"\"", "f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()] else: print( \"\\nERROR in getImages_fromList_key: invalid list_images, it should be a string", "def flip_img(img, t=None): \"\"\" It flip the image in function of the given", "up-down direction.(X) 2 --> flip over the column Flipped array in right-left direction(Y)", "\" is not an valid mrc file. It will be ignored\" ) if", "# img = img.astype(np.float64, copy=False) mean = np.mean(img) std = np.std(img) img =", "them exists :param path_to_files: list of paths :return: \"\"\" if isinstance(path_to_files, (list, tuple)):", "list_images] elif len(list_images) == 1: data = np.nan_to_num(mrc.data) result_data.append(data) return result_data def getImages_fromList_key_old(path_to_file,", "def getImages_fromList_key_old(path_to_file, list_images): \"\"\" Returns the images in the hdf file (path_to_file) listed", "of the DB. 
It is the output( or part of its) given from", "not an HDF file with the following format:\\n\\t['MDF']['images']['0']['image']\" ) print(\"you try to get", "\"\"\" import numpy as np # img = img.astype(np.float64, copy=False) mean = np.mean(img)", "filename_ext in {\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: list_candidate = [i", "of all the files present in the folder (and subfolder)given from the user", "is hereby granted, free of charge, to any person obtaining a copy of", "print(\"you try to get the following images\") print(list_images) print(\"there are \" + str(len(f[\"MDF\"][\"images\"])))", "mode=\"r\") as mrc: if isinstance(list_images, int): list_images = [list_images] if isinstance(list_images, list) or", "and associated documentation files (the \"Software\"), to deal in the Software without restriction,", "FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or", "for path_to_file, list_images in file_index_tubles: data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] ==", "try to get the following images\") print(list_images) exit() except Exception as e: print(e)", "be a string or a list/tuple of strings:\", type(list_images), ) print(\"you try to", "otherwise --> no flip :return: \"\"\" if t == 1: return np.flipud(img) elif", "in getImages_fromList_key: invalid list_images, it should be a string or a list/tuple of", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "representing the i-th images hence to get the images number 5: ['MDF']['images']['5']['image'][()] \"\"\"", "in path_to_files: if not path.isfile(p): return False elif isinstance(path_to_files, str): return path.isfile(path_to_files) return", "try to get the following images\") print(list_images) print(\"there are \" + str(len(f[\"MDF\"][\"images\"]))) exit()", "i-th images 
hence to get the images number 5: ['MDF']['images']['5']['image'][()] \"\"\" def create_circular_mask(h,", "try: if filename_ext in {\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: list_candidate", "radius = min(center[0], center[1], w - center[0], h - center[1]) Y, X =", "apply_mask(img, mask): mean = np.mean(img) img[mask==False]=mean return img def resize_img(img, resize=(76, 76)): \"\"\"", "from scipy import fftpack import numpy as np F1 = fftpack.fft2(img) F2 =", "len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i]) for i in list_images] elif", "# driver=\"core\" result_data = list() for path_to_file, list_images in file_index_tubles: data = list()", "to hdf file :param list_images: list of keys of the DB. It is", "[int(v) for v in list(f[\"MDF\"][\"images\"])] except: print( \"WARNING in get_list_images: the file '\"", "os.path.basename(path).split(\".\")[-1] result_list = None try: if filename_ext in {\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True,", "hereby granted, free of charge, to any person obtaining a copy of this", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE", "TO READ THE HDF\"\"\" def get_key_list_images(path): \"\"\" Returns the list of the keys", "a valid format. 
Return The list of valid hdf :param path_to_files: list of", "\"\"\" def create_circular_mask(h, w, center=None, radius=None): if center is None: # use the", "isinstance(list_images, list) or isinstance( list_images, tuple ): data = [ np.nan_to_num(f[\"MDF\"][\"images\"][str(i)][\"image\"][()]) for i", "in function of the given typ :param img: :param t: type of the", "in listdir(p)] elif path.isfile(p): list_new_paths.append(p) else: print( \"WARNING: The given path '\" +", "print( \"WARNING in get_list_images: the file '\" + path + \" is not", "the correct path and returns True if all of them exists :param path_to_files:", "arrays \"\"\" data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with", "restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute,", "in the hdf file (path_to_file) listed in (list_images) :param path_to_file: path to hdf", "OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR", "the row (X and Y) otherwise --> no flip :return: \"\"\" if t", "print( \"WARNING: The given path '\" + str(p) + \"' is not a", "to the following conditions: The above copyright notice and this permission notice shall", "OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\"\"\" from os import", "{\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: list_candidate = [i for i", "fftpack.fft2(img) F2 = fftpack.fftshift(F1) psd2D = np.abs(F2) ** 2 return psd2D def getList_files(paths):", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "Software, and to permit persons to whom the Software is furnished to do", "not path.isfile(p): return False elif isinstance(path_to_files, str): return path.isfile(path_to_files) return True def calc_2d_spectra(img):", "path_to_file: path to hdf file :param list_images: list of keys of the DB.", "tuple): if mrc.header.nz > 1: if len(list_images)==1: data = np.nan_to_num(mrc.data[list_images[0]]) else: data =", "im = Image.fromarray(img) return np.array(im.resize(resize, resample=Image.BILINEAR)) def normalize_img(img): \"\"\" normalize the images in", "instead of .value elif isinstance(list_images, int): data = np.nan_to_num(f[\"MDF\"][\"images\"][str(list_images)][\"image\"][()]) else: print( \"\\nERROR in", "given size :param img: as numpy array :param resize: resize size :return: return", "True if all of them exists :param path_to_files: list of paths :return: \"\"\"", "\"\"\" Resize the given image into the given size :param img: as numpy", "arrays \"\"\" # driver=\"core\" result_data = list() for path_to_file, list_images in file_index_tubles: data", "= [np.nan_to_num(mrc.data[i]) for i in list_images] elif len(list_images) == 1: data = np.nan_to_num(mrc.data)", "valid mrc file. It will be ignored\" ) if filename_ext == \"hdf\": try:", "given image into the given size :param img: as numpy array :param resize:", "0: result_list = list_candidate return result_list def getImages_fromList_key(file_index_tubles): \"\"\" Returns the images in", "the DB. 
It is the output( or part of its) given from 'get_list_images'", ":w] dist_from_center = np.sqrt((X - center[0]) ** 2 + (Y - center[1]) **", "f in listdir(p)] elif path.isfile(p): list_new_paths.append(p) else: print( \"WARNING: The given path '\"", "of the flip 1 --> flip over the row. Flipped array in up-down", "valid hdf files in the given paths. It is called recursively :param paths:", "if filename_ext in {\"mrcs\", \"st\"}: with mrcfile.mmap(path, permissive=True, mode=\"r\") as mrc: list_candidate =", "= list() for path_to_file, list_images in file_index_tubles: data = list() if path.isfile(path_to_file): if", "list of numpy arrays \"\"\" # driver=\"core\" result_data = list() for path_to_file, list_images", "np.nan_to_num(mrc.data[list_images[0]]) else: data = [np.nan_to_num(mrc.data[i]) for i in list_images] elif len(list_images) == 1:", "list of numpy arrays \"\"\" data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] ==", "\"\"\" data = list() if path.isfile(path_to_file): if path.basename(path_to_file).split(\".\")[-1] == \"hdf\": try: with h5py.File(path_to_file,", "ignored\" ) if len(list_candidate) > 0: result_list = list_candidate return result_list def getImages_fromList_key(file_index_tubles):", "folder (and subfolder)given from the user :return: list of valid hdf \"\"\" return", "= list() iterate = False for p in paths: if path.isdir(p): iterate =", "def getImages_fromList_key(file_index_tubles): \"\"\" Returns the images in the hdf file (path_to_file) listed in" ]
[ "Initializer for worker processes that makes them ignore interrupt signals https://docs.python.org/3/library/signal.html#signal.signal https://docs.python.org/3/library/signal.html#signal.SIG_IGN \"\"\"", "signal def worker_init(): \"\"\" Initializer for worker processes that makes them ignore interrupt", "worker_init(): \"\"\" Initializer for worker processes that makes them ignore interrupt signals https://docs.python.org/3/library/signal.html#signal.signal", "import signal def worker_init(): \"\"\" Initializer for worker processes that makes them ignore", "worker processes that makes them ignore interrupt signals https://docs.python.org/3/library/signal.html#signal.signal https://docs.python.org/3/library/signal.html#signal.SIG_IGN \"\"\" signal.signal(signal.SIGINT, signal.SIG_IGN)", "<filename>blacktape/util.py import signal def worker_init(): \"\"\" Initializer for worker processes that makes them", "\"\"\" Initializer for worker processes that makes them ignore interrupt signals https://docs.python.org/3/library/signal.html#signal.signal https://docs.python.org/3/library/signal.html#signal.SIG_IGN", "for worker processes that makes them ignore interrupt signals https://docs.python.org/3/library/signal.html#signal.signal https://docs.python.org/3/library/signal.html#signal.SIG_IGN \"\"\" signal.signal(signal.SIGINT,", "def worker_init(): \"\"\" Initializer for worker processes that makes them ignore interrupt signals" ]
[ "\"earned\", getWordScore(guess, n), \"points. Total:\", score, \"points\", '\\n') # Update the hand hand", "lowercase strings n: integer (HAND_SIZE; i.e., hand size required for additional points) \"\"\"", "the word earned, and the updated total score, in one line followed by", "valid word is entered, it uses up letters from the hand. * After", "playHand(hand, wordList, n): \"\"\" Allows the user to play the given hand, as", "dictionary (string -> int) wordList: list of lowercase strings n: integer (HAND_SIZE; i.e.,", "Game is over (user entered a '.' or ran out of letters), so", "\".\" * When a valid word is entered, it uses up letters from", "hand is displayed. * The user may input a word or a single", "user the total score if guess == '.': print('Goodbye! Total score:', score, 'points.')", "try again.', '\\n') # Otherwise (the word is valid): else: # Tell the", "(the word is valid): else: # Tell the user how many points the", "* When a valid word is entered, it uses up letters from the", "As long as there are still letters left in the hand: while calculateHandlen(hand)", "while calculateHandlen(hand) > 0: # Display the hand print('Current Hand:', end=' '); displayHand(hand)", "blank line) print('Invalid word, please try again.', '\\n') # Otherwise (the word is", "n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points. 
Total:\", score, \"points\", '\\n') # Update the", "letters or the user inputs a \".\" hand: dictionary (string -> int) wordList:", "PSEUDOCODE (download ps4a.py to see) # Keep track of the total score score", "words are rejected, and a message is displayed asking the user to choose", "points the word earned, and the updated total score, in one line followed", "many points the word earned, and the updated total score, in one line", "to indicate that you are finished: ')) # If the input is a", "a blank line) print('Invalid word, please try again.', '\\n') # Otherwise (the word", "as there are still letters left in the hand: while calculateHandlen(hand) > 0:", "of letters), so tell user the total score if guess == '.': print('Goodbye!", "hand: dictionary (string -> int) wordList: list of lowercase strings n: integer (HAND_SIZE;", "from the hand. * After every valid word: the score for that word", "user inputs a \".\" hand: dictionary (string -> int) wordList: list of lowercase", "total score, in one line followed by a blank line score += getWordScore(guess,", "finishes. * The hand finishes when there are no more unused letters or", "out of the loop) break # Otherwise (the input is not a single", "when there are no more unused letters or the user inputs a \".\"", "the remaining letters in the hand are displayed, and the user is asked", "the hand: while calculateHandlen(hand) > 0: # Display the hand print('Current Hand:', end='", "\"\"\" # BEGIN PSEUDOCODE (download ps4a.py to see) # Keep track of the", "entered, it uses up letters from the hand. * After every valid word:", "# As long as there are still letters left in the hand: while", "Keep track of the total score score = 0 # As long as", "displayed, and the user is asked to input another word. 
* The sum", "user to play the given hand, as follows: * The hand is displayed.", "score, \"points\", '\\n') # Update the hand hand = updateHand(hand, guess) # Game", "the hand are displayed, and the user is asked to input another word.", "are rejected, and a message is displayed asking the user to choose another", "score score = 0 # As long as there are still letters left", "blank line score += getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points. Total:\", score,", "see) # Keep track of the total score score = 0 # As", "finished: ')) # If the input is a single period: if guess ==", "hand = updateHand(hand, guess) # Game is over (user entered a '.' or", "in one line followed by a blank line score += getWordScore(guess, n) print('\"'+guess+'\"',", "print('Invalid word, please try again.', '\\n') # Otherwise (the word is valid): else:", "(download ps4a.py to see) # Keep track of the total score score =", "user is asked to input another word. * The sum of the word", "indicate they're done playing * Invalid words are rejected, and a message is", "or ran out of letters), so tell user the total score if guess", "for that word is displayed, the remaining letters in the hand are displayed,", "indicate that you are finished: ')) # If the input is a single", "playing * Invalid words are rejected, and a message is displayed asking the", "if guess == '.': # End the game (break out of the loop)", "are no more unused letters or the user inputs a \".\" hand: dictionary", "\"\"\" Allows the user to play the given hand, as follows: * The", "when the hand finishes. * The hand finishes when there are no more", "required for additional points) \"\"\" # BEGIN PSEUDOCODE (download ps4a.py to see) #", "the input is a single period: if guess == '.': # End the", "== False: # Reject invalid word (print a message followed by a blank", "in the hand are displayed, and the user is asked to input another", "(user entered a '.' 
or ran out of letters), so tell user the", "string \".\") to indicate they're done playing * Invalid words are rejected, and", "a \".\" to indicate that you are finished: ')) # If the input", "the hand hand = updateHand(hand, guess) # Game is over (user entered a", "wordList: list of lowercase strings n: integer (HAND_SIZE; i.e., hand size required for", "additional points) \"\"\" # BEGIN PSEUDOCODE (download ps4a.py to see) # Keep track", "to see) # Keep track of the total score score = 0 #", "Total:\", score, \"points\", '\\n') # Update the hand hand = updateHand(hand, guess) #", "another word until they enter a valid word or \".\" * When a", "it uses up letters from the hand. * After every valid word: the", "False: # Reject invalid word (print a message followed by a blank line)", "that word is displayed, the remaining letters in the hand are displayed, and", "== '.': # End the game (break out of the loop) break #", "entered a '.' or ran out of letters), so tell user the total", "# Display the hand print('Current Hand:', end=' '); displayHand(hand) # Ask user for", "The hand is displayed. * The user may input a word or a", "'); displayHand(hand) # Ask user for input guess = str(input('Enter word, or a", "hand hand = updateHand(hand, guess) # Game is over (user entered a '.'", "to choose another word until they enter a valid word or \".\" *", "finishes when there are no more unused letters or the user inputs a", "total score score = 0 # As long as there are still letters", "line score += getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points. Total:\", score, \"points\",", "or the user inputs a \".\" hand: dictionary (string -> int) wordList: list", "else: # Tell the user how many points the word earned, and the", "to play the given hand, as follows: * The hand is displayed. 
*", "* The sum of the word scores is displayed when the hand finishes.", "user for input guess = str(input('Enter word, or a \".\" to indicate that", "line followed by a blank line score += getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess,", "total score if guess == '.': print('Goodbye! Total score:', score, 'points.') else: print('Run", "out of letters), so tell user the total score if guess == '.':", "follows: * The hand is displayed. * The user may input a word", "hand print('Current Hand:', end=' '); displayHand(hand) # Ask user for input guess =", "Allows the user to play the given hand, as follows: * The hand", "* Invalid words are rejected, and a message is displayed asking the user", "and a message is displayed asking the user to choose another word until", "getWordScore(guess, n), \"points. Total:\", score, \"points\", '\\n') # Update the hand hand =", "hand are displayed, and the user is asked to input another word. *", "rejected, and a message is displayed asking the user to choose another word", "word. * The sum of the word scores is displayed when the hand", "hand. * After every valid word: the score for that word is displayed,", "else: # If the word is not valid: if isValidWord(guess, hand, wordList) ==", "hand: while calculateHandlen(hand) > 0: # Display the hand print('Current Hand:', end=' ');", "print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points. 
Total:\", score, \"points\", '\\n') # Update the hand", "If the input is a single period: if guess == '.': # End", "guess == '.': # End the game (break out of the loop) break", "# BEGIN PSEUDOCODE (download ps4a.py to see) # Keep track of the total", "user may input a word or a single period (the string \".\") to", "the score for that word is displayed, the remaining letters in the hand", "followed by a blank line score += getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n),", "hand, wordList) == False: # Reject invalid word (print a message followed by", "Update the hand hand = updateHand(hand, guess) # Game is over (user entered", "sum of the word scores is displayed when the hand finishes. * The", "they're done playing * Invalid words are rejected, and a message is displayed", "> 0: # Display the hand print('Current Hand:', end=' '); displayHand(hand) # Ask", "guess == '.': print('Goodbye! Total score:', score, 'points.') else: print('Run out of letters.", "score, in one line followed by a blank line score += getWordScore(guess, n)", "are displayed, and the user is asked to input another word. * The", "no more unused letters or the user inputs a \".\" hand: dictionary (string", "up letters from the hand. * After every valid word: the score for", "valid word: the score for that word is displayed, the remaining letters in", "score = 0 # As long as there are still letters left in", "# If the input is a single period: if guess == '.': #", "period): else: # If the word is not valid: if isValidWord(guess, hand, wordList)", "break # Otherwise (the input is not a single period): else: # If", "the user to play the given hand, as follows: * The hand is", "0: # Display the hand print('Current Hand:', end=' '); displayHand(hand) # Ask user", "'.' 
or ran out of letters), so tell user the total score if", "End the game (break out of the loop) break # Otherwise (the input", "the user how many points the word earned, and the updated total score,", "print('Current Hand:', end=' '); displayHand(hand) # Ask user for input guess = str(input('Enter", "not a single period): else: # If the word is not valid: if", "# If the word is not valid: if isValidWord(guess, hand, wordList) == False:", "inputs a \".\" hand: dictionary (string -> int) wordList: list of lowercase strings", "message is displayed asking the user to choose another word until they enter", "or a single period (the string \".\") to indicate they're done playing *", "Otherwise (the input is not a single period): else: # If the word", "by a blank line) print('Invalid word, please try again.', '\\n') # Otherwise (the", "the user is asked to input another word. * The sum of the", "# Tell the user how many points the word earned, and the updated", "Invalid words are rejected, and a message is displayed asking the user to", "how many points the word earned, and the updated total score, in one", "a valid word or \".\" * When a valid word is entered, it", "followed by a blank line) print('Invalid word, please try again.', '\\n') # Otherwise", "if guess == '.': print('Goodbye! Total score:', score, 'points.') else: print('Run out of", "tell user the total score if guess == '.': print('Goodbye! Total score:', score,", "valid: if isValidWord(guess, hand, wordList) == False: # Reject invalid word (print a", "(string -> int) wordList: list of lowercase strings n: integer (HAND_SIZE; i.e., hand", "asking the user to choose another word until they enter a valid word", "calculateHandlen(hand) > 0: # Display the hand print('Current Hand:', end=' '); displayHand(hand) #", "word (print a message followed by a blank line) print('Invalid word, please try", "letters), so tell user the total score if guess == '.': print('Goodbye! 
Total", "ran out of letters), so tell user the total score if guess ==", "getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points. Total:\", score, \"points\", '\\n') # Update", "Reject invalid word (print a message followed by a blank line) print('Invalid word,", "valid word or \".\" * When a valid word is entered, it uses", "(print a message followed by a blank line) print('Invalid word, please try again.',", "word or \".\" * When a valid word is entered, it uses up", "-> int) wordList: list of lowercase strings n: integer (HAND_SIZE; i.e., hand size", "guess) # Game is over (user entered a '.' or ran out of", "letters from the hand. * After every valid word: the score for that", "for input guess = str(input('Enter word, or a \".\" to indicate that you", "ps4a.py to see) # Keep track of the total score score = 0", "is valid): else: # Tell the user how many points the word earned,", "score if guess == '.': print('Goodbye! Total score:', score, 'points.') else: print('Run out", "done playing * Invalid words are rejected, and a message is displayed asking", "The sum of the word scores is displayed when the hand finishes. *", "* The hand finishes when there are no more unused letters or the", "invalid word (print a message followed by a blank line) print('Invalid word, please", "word is valid): else: # Tell the user how many points the word", "you are finished: ')) # If the input is a single period: if", "are still letters left in the hand: while calculateHandlen(hand) > 0: # Display", "score += getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points. 
Total:\", score, \"points\", '\\n')", "# Keep track of the total score score = 0 # As long", "a message is displayed asking the user to choose another word until they", "is displayed, the remaining letters in the hand are displayed, and the user", "every valid word: the score for that word is displayed, the remaining letters", "remaining letters in the hand are displayed, and the user is asked to", "# Reject invalid word (print a message followed by a blank line) print('Invalid", "of lowercase strings n: integer (HAND_SIZE; i.e., hand size required for additional points)", "enter a valid word or \".\" * When a valid word is entered,", "period (the string \".\") to indicate they're done playing * Invalid words are", "* After every valid word: the score for that word is displayed, the", "# Otherwise (the input is not a single period): else: # If the", "not valid: if isValidWord(guess, hand, wordList) == False: # Reject invalid word (print", "or \".\" * When a valid word is entered, it uses up letters", "hand finishes when there are no more unused letters or the user inputs", "a single period): else: # If the word is not valid: if isValidWord(guess,", "'.': # End the game (break out of the loop) break # Otherwise", "message followed by a blank line) print('Invalid word, please try again.', '\\n') #", "that you are finished: ')) # If the input is a single period:", "# End the game (break out of the loop) break # Otherwise (the", "hand, as follows: * The hand is displayed. * The user may input", "over (user entered a '.' or ran out of letters), so tell user", "the hand finishes. * The hand finishes when there are no more unused", "# Update the hand hand = updateHand(hand, guess) # Game is over (user", "is displayed. * The user may input a word or a single period", "* The hand is displayed. 
* The user may input a word or", "a word or a single period (the string \".\") to indicate they're done", "Ask user for input guess = str(input('Enter word, or a \".\" to indicate", "word, or a \".\" to indicate that you are finished: ')) # If", "long as there are still letters left in the hand: while calculateHandlen(hand) >", "asked to input another word. * The sum of the word scores is", "is entered, it uses up letters from the hand. * After every valid", "\"points. Total:\", score, \"points\", '\\n') # Update the hand hand = updateHand(hand, guess)", "list of lowercase strings n: integer (HAND_SIZE; i.e., hand size required for additional", "is displayed asking the user to choose another word until they enter a", "of the total score score = 0 # As long as there are", "to indicate they're done playing * Invalid words are rejected, and a message", "valid): else: # Tell the user how many points the word earned, and", "in the hand: while calculateHandlen(hand) > 0: # Display the hand print('Current Hand:',", "a single period: if guess == '.': # End the game (break out", "integer (HAND_SIZE; i.e., hand size required for additional points) \"\"\" # BEGIN PSEUDOCODE", "# Otherwise (the word is valid): else: # Tell the user how many", "user how many points the word earned, and the updated total score, in", "there are no more unused letters or the user inputs a \".\" hand:", "the game (break out of the loop) break # Otherwise (the input is", "earned, and the updated total score, in one line followed by a blank", "choose another word until they enter a valid word or \".\" * When", "= 0 # As long as there are still letters left in the", "Display the hand print('Current Hand:', end=' '); displayHand(hand) # Ask user for input", "If the word is not valid: if isValidWord(guess, hand, wordList) == False: #", "is a single period: if guess == '.': # End the game (break", "one line followed by a blank line score += getWordScore(guess, n) print('\"'+guess+'\"', \"earned\",", "a 
message followed by a blank line) print('Invalid word, please try again.', '\\n')", "'\\n') # Update the hand hand = updateHand(hand, guess) # Game is over", "they enter a valid word or \".\" * When a valid word is", "n): \"\"\" Allows the user to play the given hand, as follows: *", "Otherwise (the word is valid): else: # Tell the user how many points", "size required for additional points) \"\"\" # BEGIN PSEUDOCODE (download ps4a.py to see)", "single period (the string \".\") to indicate they're done playing * Invalid words", "is asked to input another word. * The sum of the word scores", "int) wordList: list of lowercase strings n: integer (HAND_SIZE; i.e., hand size required", "displayed when the hand finishes. * The hand finishes when there are no", "is not valid: if isValidWord(guess, hand, wordList) == False: # Reject invalid word", "Total score:', score, 'points.') else: print('Run out of letters. Total score:', score, 'points.')", "and the user is asked to input another word. * The sum of", "by a blank line score += getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points.", "= str(input('Enter word, or a \".\" to indicate that you are finished: '))", "input is a single period: if guess == '.': # End the game", "\".\") to indicate they're done playing * Invalid words are rejected, and a", "period: if guess == '.': # End the game (break out of the", "updateHand(hand, guess) # Game is over (user entered a '.' or ran out", "The hand finishes when there are no more unused letters or the user", "updated total score, in one line followed by a blank line score +=", "are finished: ')) # If the input is a single period: if guess", "if isValidWord(guess, hand, wordList) == False: # Reject invalid word (print a message", "the loop) break # Otherwise (the input is not a single period): else:", "the user to choose another word until they enter a valid word or", "scores is displayed when the hand finishes. 
* The hand finishes when there", "is over (user entered a '.' or ran out of letters), so tell", "for additional points) \"\"\" # BEGIN PSEUDOCODE (download ps4a.py to see) # Keep", "and the updated total score, in one line followed by a blank line", "single period): else: # If the word is not valid: if isValidWord(guess, hand,", "to input another word. * The sum of the word scores is displayed", "word is not valid: if isValidWord(guess, hand, wordList) == False: # Reject invalid", "the total score if guess == '.': print('Goodbye! Total score:', score, 'points.') else:", "def playHand(hand, wordList, n): \"\"\" Allows the user to play the given hand,", "strings n: integer (HAND_SIZE; i.e., hand size required for additional points) \"\"\" #", "'\\n') # Otherwise (the word is valid): else: # Tell the user how", "BEGIN PSEUDOCODE (download ps4a.py to see) # Keep track of the total score", "user to choose another word until they enter a valid word or \".\"", "After every valid word: the score for that word is displayed, the remaining", "hand size required for additional points) \"\"\" # BEGIN PSEUDOCODE (download ps4a.py to", "again.', '\\n') # Otherwise (the word is valid): else: # Tell the user", "another word. * The sum of the word scores is displayed when the", "single period: if guess == '.': # End the game (break out of", "the hand print('Current Hand:', end=' '); displayHand(hand) # Ask user for input guess", "word: the score for that word is displayed, the remaining letters in the", "word, please try again.', '\\n') # Otherwise (the word is valid): else: #", "the word scores is displayed when the hand finishes. * The hand finishes", "left in the hand: while calculateHandlen(hand) > 0: # Display the hand print('Current", "guess = str(input('Enter word, or a \".\" to indicate that you are finished:", "')) # If the input is a single period: if guess == '.':", "# Game is over (user entered a '.' or ran out of letters),", "as follows: * The hand is displayed. 
* The user may input a", "displayed, the remaining letters in the hand are displayed, and the user is", "please try again.', '\\n') # Otherwise (the word is valid): else: # Tell", "loop) break # Otherwise (the input is not a single period): else: #", "input guess = str(input('Enter word, or a \".\" to indicate that you are", "Hand:', end=' '); displayHand(hand) # Ask user for input guess = str(input('Enter word,", "input another word. * The sum of the word scores is displayed when", "given hand, as follows: * The hand is displayed. * The user may", "is displayed when the hand finishes. * The hand finishes when there are", "n: integer (HAND_SIZE; i.e., hand size required for additional points) \"\"\" # BEGIN", "the given hand, as follows: * The hand is displayed. * The user", "(break out of the loop) break # Otherwise (the input is not a", "= updateHand(hand, guess) # Game is over (user entered a '.' or ran", "the total score score = 0 # As long as there are still", "i.e., hand size required for additional points) \"\"\" # BEGIN PSEUDOCODE (download ps4a.py", "displayed asking the user to choose another word until they enter a valid", "unused letters or the user inputs a \".\" hand: dictionary (string -> int)", "a single period (the string \".\") to indicate they're done playing * Invalid", "isValidWord(guess, hand, wordList) == False: # Reject invalid word (print a message followed", "track of the total score score = 0 # As long as there", "more unused letters or the user inputs a \".\" hand: dictionary (string ->", "game (break out of the loop) break # Otherwise (the input is not", "(the input is not a single period): else: # If the word is", "n), \"points. Total:\", score, \"points\", '\\n') # Update the hand hand = updateHand(hand,", "str(input('Enter word, or a \".\" to indicate that you are finished: ')) #", "word is entered, it uses up letters from the hand. * After every", "print('Goodbye! Total score:', score, 'points.') else: print('Run out of letters. 
Total score:', score,", "the word is not valid: if isValidWord(guess, hand, wordList) == False: # Reject", "a '.' or ran out of letters), so tell user the total score", "play the given hand, as follows: * The hand is displayed. * The", "When a valid word is entered, it uses up letters from the hand.", "wordList) == False: # Reject invalid word (print a message followed by a", "or a \".\" to indicate that you are finished: ')) # If the", "input a word or a single period (the string \".\") to indicate they're", "input is not a single period): else: # If the word is not", "* The user may input a word or a single period (the string", "word or a single period (the string \".\") to indicate they're done playing", "hand finishes. * The hand finishes when there are no more unused letters", "points) \"\"\" # BEGIN PSEUDOCODE (download ps4a.py to see) # Keep track of", "a blank line score += getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points. Total:\",", "the updated total score, in one line followed by a blank line score", "== '.': print('Goodbye! Total score:', score, 'points.') else: print('Run out of letters. Total", "word scores is displayed when the hand finishes. * The hand finishes when", "letters left in the hand: while calculateHandlen(hand) > 0: # Display the hand", "of the word scores is displayed when the hand finishes. * The hand", "+= getWordScore(guess, n) print('\"'+guess+'\"', \"earned\", getWordScore(guess, n), \"points. Total:\", score, \"points\", '\\n') #", "a \".\" hand: dictionary (string -> int) wordList: list of lowercase strings n:", "wordList, n): \"\"\" Allows the user to play the given hand, as follows:", "uses up letters from the hand. 
* After every valid word: the score", "# Ask user for input guess = str(input('Enter word, or a \".\" to", "the user inputs a \".\" hand: dictionary (string -> int) wordList: list of", "word earned, and the updated total score, in one line followed by a", "\".\" hand: dictionary (string -> int) wordList: list of lowercase strings n: integer", "still letters left in the hand: while calculateHandlen(hand) > 0: # Display the", "\"points\", '\\n') # Update the hand hand = updateHand(hand, guess) # Game is", "score for that word is displayed, the remaining letters in the hand are", "0 # As long as there are still letters left in the hand:", "The user may input a word or a single period (the string \".\")", "\".\" to indicate that you are finished: ')) # If the input is", "(the string \".\") to indicate they're done playing * Invalid words are rejected,", "a valid word is entered, it uses up letters from the hand. *", "word until they enter a valid word or \".\" * When a valid", "letters in the hand are displayed, and the user is asked to input", "may input a word or a single period (the string \".\") to indicate", "word is displayed, the remaining letters in the hand are displayed, and the", "of the loop) break # Otherwise (the input is not a single period):", "displayed. * The user may input a word or a single period (the", "until they enter a valid word or \".\" * When a valid word", "there are still letters left in the hand: while calculateHandlen(hand) > 0: #", "line) print('Invalid word, please try again.', '\\n') # Otherwise (the word is valid):", "end=' '); displayHand(hand) # Ask user for input guess = str(input('Enter word, or", "(HAND_SIZE; i.e., hand size required for additional points) \"\"\" # BEGIN PSEUDOCODE (download", "is not a single period): else: # If the word is not valid:", "the hand. * After every valid word: the score for that word is", "displayHand(hand) # Ask user for input guess = str(input('Enter word, or a \".\"", "'.': print('Goodbye! 
Total score:', score, 'points.') else: print('Run out of letters. Total score:',", "Tell the user how many points the word earned, and the updated total", "so tell user the total score if guess == '.': print('Goodbye! Total score:'," ]
[]
[ "# Generated by Django 2.1.7 on 2019-02-15 07:55 from django.db import migrations class", "2.1.7 on 2019-02-15 07:55 from django.db import migrations class Migration(migrations.Migration): dependencies = [(\"question\",", "dependencies = [(\"question\", \"0005_merge_20190215_0616\")] operations = [ migrations.RemoveField(model_name=\"answer\", name=\"is_visible\"), migrations.RemoveField(model_name=\"comment\", name=\"is_visible\"), migrations.RemoveField(model_name=\"question\", name=\"is_visible\"),", "by Django 2.1.7 on 2019-02-15 07:55 from django.db import migrations class Migration(migrations.Migration): dependencies", "class Migration(migrations.Migration): dependencies = [(\"question\", \"0005_merge_20190215_0616\")] operations = [ migrations.RemoveField(model_name=\"answer\", name=\"is_visible\"), migrations.RemoveField(model_name=\"comment\", name=\"is_visible\"),", "on 2019-02-15 07:55 from django.db import migrations class Migration(migrations.Migration): dependencies = [(\"question\", \"0005_merge_20190215_0616\")]", "Django 2.1.7 on 2019-02-15 07:55 from django.db import migrations class Migration(migrations.Migration): dependencies =", "import migrations class Migration(migrations.Migration): dependencies = [(\"question\", \"0005_merge_20190215_0616\")] operations = [ migrations.RemoveField(model_name=\"answer\", name=\"is_visible\"),", "django.db import migrations class Migration(migrations.Migration): dependencies = [(\"question\", \"0005_merge_20190215_0616\")] operations = [ migrations.RemoveField(model_name=\"answer\",", "Migration(migrations.Migration): dependencies = [(\"question\", \"0005_merge_20190215_0616\")] operations = [ migrations.RemoveField(model_name=\"answer\", name=\"is_visible\"), migrations.RemoveField(model_name=\"comment\", name=\"is_visible\"), migrations.RemoveField(model_name=\"question\",", "2019-02-15 07:55 from django.db import migrations class Migration(migrations.Migration): dependencies = [(\"question\", 
\"0005_merge_20190215_0616\")] operations", "migrations class Migration(migrations.Migration): dependencies = [(\"question\", \"0005_merge_20190215_0616\")] operations = [ migrations.RemoveField(model_name=\"answer\", name=\"is_visible\"), migrations.RemoveField(model_name=\"comment\",", "Generated by Django 2.1.7 on 2019-02-15 07:55 from django.db import migrations class Migration(migrations.Migration):", "= [(\"question\", \"0005_merge_20190215_0616\")] operations = [ migrations.RemoveField(model_name=\"answer\", name=\"is_visible\"), migrations.RemoveField(model_name=\"comment\", name=\"is_visible\"), migrations.RemoveField(model_name=\"question\", name=\"is_visible\"), ]", "07:55 from django.db import migrations class Migration(migrations.Migration): dependencies = [(\"question\", \"0005_merge_20190215_0616\")] operations =", "from django.db import migrations class Migration(migrations.Migration): dependencies = [(\"question\", \"0005_merge_20190215_0616\")] operations = [" ]
[ "active_channel = hexchat.get_info('channel') logging.info('Changed active tab to %s', active_channel) def on_highlight_notification(word, word_eol, userdata):", "Notification Server through DBus') interface.create_notification(nickname, network, channel, title, text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus", "logging.debug('Message type: \"%s\"', message_type) logging.debug('Message: %s', repr(text)) # Ignore notification if window is", "NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT,", "input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg", "Notification Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to Notification Server failed') logging.info('Explicitly", "unhook it just before exit, but that did # not work either. I", "path import dbus import hexchat __module_name__ = 'highlights_notifications' __module_description__ = 'Better notifications with", "logging.warning('DBus connection to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text))", "but that stopped to # work when I hooked `sys.excepthook`. 
I have tried", "as channel is already active') return hexchat.EAT_NONE if interface is None: logging.debug('No DBus", "Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to Dialog', on_highlight_notification, userdata='PVT')", "get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata): global interface logging.info('HexChat notification server ending') hexchat.prnt('Unloading {},", "return hexchat.EAT_NONE if interface is None: logging.debug('No DBus interface prepared') interface = get_dbus_interface()", "[%s | %s | %s]', network, channel, repr(str(nickname))) logging.debug('Application details: [%s | %s]',", "None: logging.warning('DBus connection to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title,", "through DBus') interface.create_notification(nickname, network, channel, title, text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus message to", "global interface logging.info('HexChat notification server ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common", "text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus message to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY", "to normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1') try: logging.info('Sending Quit message to", "input_balloon_priv 1') try: logging.info('Sending Quit message to Notification Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException):", "to # work when I hooked `sys.excepthook`. 
I have tried to unhook it", "interface Success') return interface except dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start() return None def", "to raise an Exception, but that stopped to # work when I hooked", "return interface except dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start() return None def on_focus_tab(word, word_eol,", "userdata if message_type == 'HLT': title = 'Highlighted message from: {} ({})'.format(nickname, channel)", "this also kills whole HexChat, so the plugin cannot be restarted. # However,", "channel, repr(str(nickname))) logging.debug('Application details: [%s | %s]', win_status, active_channel) logging.debug('Message type: \"%s\"', message_type)", "hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight',", "if window is active and active channel is the one where message arrived", "from: {} ({})'.format(nickname, channel) else: title = 'Private message from: {} ({})'.format(nickname, network)", "{}'.format(title, text)) interface = get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata): global interface logging.info('HexChat notification", "However, I did not find a better way, as if the plugin used", "(AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to Notification Server failed') logging.info('Explicitly quit') # Unfortunately, this", "interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return interface except dbus.exceptions.DBusException: logging.debug('DBus interface", "global active_channel active_channel = hexchat.get_info('channel') logging.info('Changed active tab to %s', active_channel) def on_highlight_notification(word,", "# However, I did not find a better way, as if the plugin", "= get_dbus_interface() if 
interface is None: logging.warning('DBus connection to Notification Server fail') logging.warning('Notification", "active tab to %s', active_channel) def on_highlight_notification(word, word_eol, userdata): global interface win_status =", "exc_traceback) sys.excepthook = handle_exception def server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface():", "message_type) logging.debug('Message: %s', repr(text)) # Ignore notification if window is active and active", "= '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s'", "= None win_status = None interface = None logging.info('HexChat notification plugin starting ==============================')", "fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) interface = get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata):", "common notifications to suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab)", "repr(text)) # Ignore notification if window is active and active channel is the", "message to Notification Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to Notification Server", "__module_version__ = '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s", "# Unfortunately, this also kills whole HexChat, so the plugin cannot be restarted.", "either. 
I find the proper Exception logging more useful than ability to restart", "interface except dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start() return None def on_focus_tab(word, word_eol, userdata):", "= handle_exception def server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting DBus", "shell=True) def get_dbus_interface(): logging.info('Getting DBus interface for Notification Server') try: session_bus = dbus.SessionBus()", "logging import re import subprocess import sys from os import path import dbus", "channel = hexchat.get_info('channel') nickname = word[0] nickname = re.sub(r'^\\x03\\d+', '', nickname) # Remove", "failed') logging.info('Explicitly quit') # Unfortunately, this also kills whole HexChat, so the plugin", "hooked `sys.excepthook`. I have tried to unhook it just before exit, but that", "interface logging.info('HexChat notification server ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications", "dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return", "server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting DBus interface for Notification Server') try:", "'%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type,", "0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg 
Hilight',", "String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import re import subprocess import sys from", "try: logging.info('Sending Quit message to Notification Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message", "# plugin. exit(1) active_channel = None win_status = None interface = None logging.info('HexChat", "message arrived if win_status == 'active' and channel == active_channel: logging.info('Not showing notifications", "network = hexchat.get_info('network') channel = hexchat.get_info('channel') nickname = word[0] nickname = re.sub(r'^\\x03\\d+', '',", "channel == active_channel: logging.info('Not showing notifications as channel is already active') return hexchat.EAT_NONE", "# Remove color text = word[1] message_type = userdata if message_type == 'HLT':", "interface for Notification Server') try: session_bus = dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface", "not find a better way, as if the plugin used DBus interface it", "interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to Notification Server failed') logging.info('Explicitly quit') #", "plugin used DBus interface it seems to hang # on exit. 
Only other", "__module_version__)) logging.info('Setting common notifications to suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus", "the one where message arrived if win_status == 'active' and channel == active_channel:", "= hexchat.get_info('channel') logging.info('Changed active tab to %s', active_channel) def on_highlight_notification(word, word_eol, userdata): global", "hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification,", "and active channel is the one where message arrived if win_status == 'active'", "re import subprocess import sys from os import path import dbus import hexchat", "'/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return interface except dbus.exceptions.DBusException: logging.debug('DBus", "= hexchat.get_info('network') channel = hexchat.get_info('channel') nickname = word[0] nickname = re.sub(r'^\\x03\\d+', '', nickname)", "if win_status == 'active' and channel == active_channel: logging.info('Not showing notifications as channel", "notifications as channel is already active') return hexchat.EAT_NONE if interface is None: logging.debug('No", "network) logging.info('New notification [%s | %s | %s]', network, channel, repr(str(nickname))) logging.debug('Application details:", "to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) interface =", "re.sub(r'^\\x03\\d+', '', nickname) # Remove color text = word[1] message_type = userdata if", "the plugin used DBus interface it seems to hang # on exit. 
Only", "from: {} ({})'.format(nickname, network) logging.info('New notification [%s | %s | %s]', network, channel,", "other workaround I have found was to raise an Exception, but that stopped", "logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting DBus interface for Notification Server')", "IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import re import subprocess import sys", "if message_type == 'HLT': title = 'Highlighted message from: {} ({})'.format(nickname, channel) else:", "is the one where message arrived if win_status == 'active' and channel ==", "None interface = None logging.info('HexChat notification plugin starting ==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__,", "did # not work either. I find the proper Exception logging more useful", "# not work either. I find the proper Exception logging more useful than", "Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT')", "'Highlighted message from: {} ({})'.format(nickname, channel) else: title = 'Private message from: {}", "channel, title, text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus message to Notification Server fail') logging.warning('Notification", "window is active and active channel is the one where message arrived if", "== 'HLT': title = 'Highlighted message from: {} ({})'.format(nickname, channel) else: title =", "to Notification Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to Notification Server failed')", "not work either. 
I find the proper Exception logging more useful than ability", "-b \"{}\" {}'.format(title, text)) else: try: logging.info('Sending message to Notification Server through DBus')", "'1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG),", "# Ignore notification if window is active and active channel is the one", "ability to restart # plugin. exit(1) active_channel = None win_status = None interface", "Remove color text = word[1] message_type = userdata if message_type == 'HLT': title", "where message arrived if win_status == 'active' and channel == active_channel: logging.info('Not showing", "= None logging.info('HexChat notification plugin starting ==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting", "http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import re import subprocess import", "so the plugin cannot be restarted. # However, I did not find a", "proper Exception logging more useful than ability to restart # plugin. 
exit(1) active_channel", "if the plugin used DBus interface it seems to hang # on exit.", "hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel", "handle_exception def server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting DBus interface", "dbus.exceptions.DBusException: logging.warning('DBus message to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title,", "= word[1] message_type = userdata if message_type == 'HLT': title = 'Highlighted message", "%s | %s]', network, channel, repr(str(nickname))) logging.debug('Application details: [%s | %s]', win_status, active_channel)", "= '~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value,", "def get_dbus_interface(): logging.info('Getting DBus interface for Notification Server') try: session_bus = dbus.SessionBus() proxy", "userdata='PVT') hexchat.hook_print('Private Message to Dialog', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Action to Dialog', on_highlight_notification, userdata='PVT')", "interface.create_notification(nickname, network, channel, title, text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus message to Notification Server", "title = 'Highlighted message from: {} ({})'.format(nickname, channel) else: title = 'Private message", "Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) interface = get_dbus_interface() return", "HexChat, so the plugin cannot be restarted. 
# However, I did not find", "{}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0')", "= dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success')", "Exception logging more useful than ability to restart # plugin. exit(1) active_channel =", "nickname = word[0] nickname = re.sub(r'^\\x03\\d+', '', nickname) # Remove color text =", "restart # plugin. exit(1) active_channel = None win_status = None interface = None", "channel is already active') return hexchat.EAT_NONE if interface is None: logging.debug('No DBus interface", "hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) interface = get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata): global", "logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook = handle_exception def server_start():", "exit. 
Only other workaround I have found was to raise an Exception, but", "one where message arrived if win_status == 'active' and channel == active_channel: logging.info('Not", "= hexchat.get_info('win_status') network = hexchat.get_info('network') channel = hexchat.get_info('channel') nickname = word[0] nickname =", "version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv", "showing notifications as channel is already active') return hexchat.EAT_NONE if interface is None:", "interface is None: logging.debug('No DBus interface prepared') interface = get_dbus_interface() if interface is", "server_start() return None def on_focus_tab(word, word_eol, userdata): global active_channel active_channel = hexchat.get_info('channel') logging.info('Changed", "def on_focus_tab(word, word_eol, userdata): global active_channel active_channel = hexchat.get_info('channel') logging.info('Changed active tab to", "find the proper Exception logging more useful than ability to restart # plugin.", "= 'Better notifications with actions' __module_version__ = '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG =", "get_dbus_interface(): logging.info('Getting DBus interface for Notification Server') try: session_bus = dbus.SessionBus() proxy =", "exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook = handle_exception def server_start(): logging.info('Starting server') subprocess.Popen('python3", "logging.info('Setting common notifications to normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1') try: logging.info('Sending", "work either. 
I find the proper Exception logging more useful than ability to", "actions' __module_version__ = '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT = '%(asctime)-24s", "plugin starting ==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to suspended')", "dbus.exceptions.DBusException): logging.warning('Quit message to Notification Server failed') logging.info('Explicitly quit') # Unfortunately, this also", "did not find a better way, as if the plugin used DBus interface", "%s', repr(text)) # Ignore notification if window is active and active channel is", "else: try: logging.info('Sending message to Notification Server through DBus') interface.create_notification(nickname, network, channel, title,", "logging.debug('No DBus interface prepared') interface = get_dbus_interface() if interface is None: logging.warning('DBus connection", "dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start() return None def on_focus_tab(word, word_eol, userdata): global active_channel", "logging.debug('Application details: [%s | %s]', win_status, active_channel) logging.debug('Message type: \"%s\"', message_type) logging.debug('Message: %s',", "None win_status = None interface = None logging.info('HexChat notification plugin starting ==============================') server_start()", "= 'Highlighted message from: {} ({})'.format(nickname, channel) else: title = 'Private message from:", "Unfortunately, this also kills whole HexChat, so the plugin cannot be restarted. #", "restarted. # However, I did not find a better way, as if the", "Exception, but that stopped to # work when I hooked `sys.excepthook`. 
I have", "except dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start() return None def on_focus_tab(word, word_eol, userdata): global", "'/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def", "as if the plugin used DBus interface it seems to hang # on", "LOG = '~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type,", "on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to Dialog', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Action to Dialog', on_highlight_notification,", "interface prepared') interface = get_dbus_interface() if interface is None: logging.warning('DBus connection to Notification", "active_channel active_channel = hexchat.get_info('channel') logging.info('Changed active tab to %s', active_channel) def on_highlight_notification(word, word_eol,", "= session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return interface except", "handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook =", "Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) interface = get_dbus_interface()", "hexchat.EAT_NONE def on_unload(userdata): global interface logging.info('HexChat notification server ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__,", "found was to raise an Exception, but that 
stopped to # work when", "notifications to suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload)", "exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook = handle_exception def server_start(): logging.info('Starting server')", "notifications with actions' __module_version__ = '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT", "def on_unload(userdata): global interface logging.info('HexChat notification server ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__))", "Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT')", "message_type = userdata if message_type == 'HLT': title = 'Highlighted message from: {}", "active_channel) def on_highlight_notification(word, word_eol, userdata): global interface win_status = hexchat.get_info('win_status') network = hexchat.get_info('network')", "channel is the one where message arrived if win_status == 'active' and channel", "workaround I have found was to raise an Exception, but that stopped to", "on_focus_tab(word, word_eol, userdata): global active_channel active_channel = hexchat.get_info('channel') logging.info('Changed active tab to %s',", "{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set", "get_dbus_interface() if interface is None: logging.warning('DBus connection to Notification Server fail') logging.warning('Notification fallback')", "DBus interface for Notification Server') try: session_bus = dbus.SessionBus() 
proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat')", "hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1') try: logging.info('Sending Quit message to Notification Server')", "= None interface = None logging.info('HexChat notification plugin starting ==============================') server_start() hexchat.prnt('{}, version", "dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return interface except dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start()", "notifications to normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1') try: logging.info('Sending Quit message", "is already active') return hexchat.EAT_NONE if interface is None: logging.debug('No DBus interface prepared')", "logging.info('HexChat notification server ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to", "1') hexchat.command('set input_balloon_priv 1') try: logging.info('Sending Quit message to Notification Server') interface.quit() except", "I find the proper Exception logging more useful than ability to restart #", "fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) interface = get_dbus_interface() return hexchat.EAT_NONE", "https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import re import subprocess import sys from os import", "None: logging.debug('No DBus interface prepared') interface = get_dbus_interface() if interface is None: logging.warning('DBus", "and channel == active_channel: logging.info('Not showing notifications as channel is already active') return", "= hexchat.get_info('channel') nickname = word[0] nickname = re.sub(r'^\\x03\\d+', '', nickname) # Remove color", "{}'.format(title, text)) else: try: logging.info('Sending 
message to Notification Server through DBus') interface.create_notification(nickname, network,", "Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to Dialog', on_highlight_notification,", "input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification,", "useful than ability to restart # plugin. exit(1) active_channel = None win_status =", "'~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback):", "if interface is None: logging.warning('DBus connection to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY", "word[0] nickname = re.sub(r'^\\x03\\d+', '', nickname) # Remove color text = word[1] message_type", "nickname = re.sub(r'^\\x03\\d+', '', nickname) # Remove color text = word[1] message_type =", "return None def on_focus_tab(word, word_eol, userdata): global active_channel active_channel = hexchat.get_info('channel') logging.info('Changed active", "quit') # Unfortunately, this also kills whole HexChat, so the plugin cannot be", "0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT')", "hexchat __module_name__ = 'highlights_notifications' __module_description__ = 'Better notifications with actions' __module_version__ = '1.1'", "the proper Exception logging more useful than ability to restart # plugin. 
exit(1)", "nickname) # Remove color text = word[1] message_type = userdata if message_type ==", "%s', active_channel) def on_highlight_notification(word, word_eol, userdata): global interface win_status = hexchat.get_info('win_status') network =", "logging.info('Getting DBus interface for Notification Server') try: session_bus = dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat',", "arrived if win_status == 'active' and channel == active_channel: logging.info('Not showing notifications as", "Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import re import", "def on_highlight_notification(word, word_eol, userdata): global interface win_status = hexchat.get_info('win_status') network = hexchat.get_info('network') channel", "dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return interface except dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start() return", "interface win_status = hexchat.get_info('win_status') network = hexchat.get_info('network') channel = hexchat.get_info('channel') nickname = word[0]", "\"{}\" {}'.format(title, text)) interface = get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata): global interface logging.info('HexChat", "to Notification Server failed') logging.info('Explicitly quit') # Unfortunately, this also kills whole HexChat,", "Ignore notification if window is active and active channel is the one where", "but that did # not work either. 
I find the proper Exception logging", "Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import re import subprocess import sys from os", "format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value,", "level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback)", "notification if window is active and active channel is the one where message", "import subprocess import sys from os import path import dbus import hexchat __module_name__", "'HLT': title = 'Highlighted message from: {} ({})'.format(nickname, channel) else: title = 'Private", "is active and active channel is the one where message arrived if win_status", "userdata): global active_channel active_channel = hexchat.get_info('channel') logging.info('Changed active tab to %s', active_channel) def", "cannot be restarted. # However, I did not find a better way, as", "just before exit, but that did # not work either. I find the", "__module_description__ = 'Better notifications with actions' __module_version__ = '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG", "== active_channel: logging.info('Not showing notifications as channel is already active') return hexchat.EAT_NONE if", "except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to Notification Server failed') logging.info('Explicitly quit') # Unfortunately,", "the plugin cannot be restarted. 
# However, I did not find a better", "-b \"{}\" {}'.format(title, text)) interface = get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata): global interface", "proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return interface", "%(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback))", "Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import re import subprocess", "to %s', active_channel) def on_highlight_notification(word, word_eol, userdata): global interface win_status = hexchat.get_info('win_status') network", "interface = get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata): global interface logging.info('HexChat notification server ending')", "tab to %s', active_channel) def on_highlight_notification(word, word_eol, userdata): global interface win_status = hexchat.get_info('win_status')", "win_status, active_channel) logging.debug('Message type: \"%s\"', message_type) logging.debug('Message: %s', repr(text)) # Ignore notification if", "hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to normal') hexchat.command('set input_balloon_hilight 1')", "to suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel", "whole HexChat, so the plugin cannot be restarted. 
# However, I did not", "subprocess import sys from os import path import dbus import hexchat __module_name__ =", "import hexchat __module_name__ = 'highlights_notifications' __module_description__ = 'Better notifications with actions' __module_version__ =", "sys.excepthook = handle_exception def server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting", "server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting DBus interface for Notification", "normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1') try: logging.info('Sending Quit message to Notification", "hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set", "word_eol, userdata): global active_channel active_channel = hexchat.get_info('channel') logging.info('Changed active tab to %s', active_channel)", "logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) else: try: logging.info('Sending message to Notification", "sys from os import path import dbus import hexchat __module_name__ = 'highlights_notifications' __module_description__", "I hooked `sys.excepthook`. I have tried to unhook it just before exit, but", "else: title = 'Private message from: {} ({})'.format(nickname, network) logging.info('New notification [%s |", "= get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata): global interface logging.info('HexChat notification server ending') hexchat.prnt('Unloading", "that did # not work either. 
I find the proper Exception logging more", "interface = None logging.info('HexChat notification plugin starting ==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__))", "Fail') server_start() return None def on_focus_tab(word, word_eol, userdata): global active_channel active_channel = hexchat.get_info('channel')", "notifications with actions. HexChat Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import", "# on exit. Only other workaround I have found was to raise an", "except dbus.exceptions.DBusException: logging.warning('DBus message to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\"", "[%s | %s]', win_status, active_channel) logging.debug('Message type: \"%s\"', message_type) logging.debug('Message: %s', repr(text)) #", "'Better notifications with actions' __module_version__ = '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log'", "exit, but that did # not work either. 
I find the proper Exception", "session_bus = dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface", "interface is None: logging.warning('DBus connection to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b", "on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to Dialog', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private", "hexchat.get_info('channel') logging.info('Changed active tab to %s', active_channel) def on_highlight_notification(word, word_eol, userdata): global interface", "starting ==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to suspended') hexchat.command('set", "'', nickname) # Remove color text = word[1] message_type = userdata if message_type", "os import path import dbus import hexchat __module_name__ = 'highlights_notifications' __module_description__ = 'Better", "notification [%s | %s | %s]', network, channel, repr(str(nickname))) logging.debug('Application details: [%s |", "message_type == 'HLT': title = 'Highlighted message from: {} ({})'.format(nickname, channel) else: title", "plugin. 
exit(1) active_channel = None win_status = None interface = None logging.info('HexChat notification", "active_channel: logging.info('Not showing notifications as channel is already active') return hexchat.EAT_NONE if interface", "with actions' __module_version__ = '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT =", "__module_name__ = 'highlights_notifications' __module_description__ = 'Better notifications with actions' __module_version__ = '1.1' NOTIFICATION_SERVER", "logging.warning('DBus message to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text))", "dbus import hexchat __module_name__ = 'highlights_notifications' __module_description__ = 'Better notifications with actions' __module_version__", "suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab', on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action", "hexchat.get_info('win_status') network = hexchat.get_info('network') channel = hexchat.get_info('channel') nickname = word[0] nickname = re.sub(r'^\\x03\\d+',", "logging.info('Changed active tab to %s', active_channel) def on_highlight_notification(word, word_eol, userdata): global interface win_status", "it just before exit, but that did # not work either. I find", "({})'.format(nickname, channel) else: title = 'Private message from: {} ({})'.format(nickname, network) logging.info('New notification", "for Notification Server') try: session_bus = dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface =", "# work when I hooked `sys.excepthook`. 
I have tried to unhook it just", "text = word[1] message_type = userdata if message_type == 'HLT': title = 'Highlighted", "logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) interface = get_dbus_interface() return hexchat.EAT_NONE def", "message from: {} ({})'.format(nickname, channel) else: title = 'Private message from: {} ({})'.format(nickname,", "'highlights_notifications' __module_description__ = 'Better notifications with actions' __module_version__ = '1.1' NOTIFICATION_SERVER = '/home/skontar/Repos/hexchat-plugins/notification_server.py'", "win_status = None interface = None logging.info('HexChat notification plugin starting ==============================') server_start() hexchat.prnt('{},", "active and active channel is the one where message arrived if win_status ==", "DBus') interface.create_notification(nickname, network, channel, title, text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus message to Notification", "details: [%s | %s]', win_status, active_channel) logging.debug('Message type: \"%s\"', message_type) logging.debug('Message: %s', repr(text))", "%(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value,", "HexChat Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import re", "= '/home/skontar/Repos/hexchat-plugins/notification_server.py' LOG = '~/highlights_notifications.log' FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG)", "exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, 
exc_traceback) sys.excepthook = handle_exception def", "be restarted. # However, I did not find a better way, as if", "version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv", "message to Notification Server failed') logging.info('Explicitly quit') # Unfortunately, this also kills whole", "DBus interface prepared') interface = get_dbus_interface() if interface is None: logging.warning('DBus connection to", "seems to hang # on exit. Only other workaround I have found was", "logging.info('Not showing notifications as channel is already active') return hexchat.EAT_NONE if interface is", "logging.warning('Quit message to Notification Server failed') logging.info('Explicitly quit') # Unfortunately, this also kills", "Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) else: try: logging.info('Sending message", "word_eol, userdata): global interface win_status = hexchat.get_info('win_status') network = hexchat.get_info('network') channel = hexchat.get_info('channel')", "exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook = handle_exception", "if interface is None: logging.debug('No DBus interface prepared') interface = get_dbus_interface() if interface", "{}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1')", "Server failed') logging.info('Explicitly quit') # Unfortunately, this also kills whole HexChat, so the", "| %s]', network, channel, repr(str(nickname))) logging.debug('Application details: [%s | %s]', win_status, active_channel) logging.debug('Message", "server_start() hexchat.prnt('{}, version 
{}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to suspended') hexchat.command('set input_balloon_hilight 0')", "return hexchat.EAT_NONE def on_unload(userdata): global interface logging.info('HexChat notification server ending') hexchat.prnt('Unloading {}, version", "exit(1) active_channel = None win_status = None interface = None logging.info('HexChat notification plugin", "common notifications to normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1') try: logging.info('Sending Quit", "= re.sub(r'^\\x03\\d+', '', nickname) # Remove color text = word[1] message_type = userdata", "title = 'Private message from: {} ({})'.format(nickname, network) logging.info('New notification [%s | %s", "on_highlight_notification(word, word_eol, userdata): global interface win_status = hexchat.get_info('win_status') network = hexchat.get_info('network') channel =", "1') try: logging.info('Sending Quit message to Notification Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit", "way, as if the plugin used DBus interface it seems to hang #", "when I hooked `sys.excepthook`. I have tried to unhook it just before exit,", "than ability to restart # plugin. 
exit(1) active_channel = None win_status = None", "import sys from os import path import dbus import hexchat __module_name__ = 'highlights_notifications'", "active channel is the one where message arrived if win_status == 'active' and", "interface = get_dbus_interface() if interface is None: logging.warning('DBus connection to Notification Server fail')", "= word[0] nickname = re.sub(r'^\\x03\\d+', '', nickname) # Remove color text = word[1]", "import dbus import hexchat __module_name__ = 'highlights_notifications' __module_description__ = 'Better notifications with actions'", "exc_value, exc_traceback) sys.excepthook = handle_exception def server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def", "an Exception, but that stopped to # work when I hooked `sys.excepthook`. I", "work when I hooked `sys.excepthook`. I have tried to unhook it just before", "connection to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) else:", "exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook = handle_exception def server_start(): logging.info('Starting", "logging.info('Setting common notifications to suspended') hexchat.command('set input_balloon_hilight 0') hexchat.command('set input_balloon_priv 0') hexchat.hook_print('Focus Tab',", "({})'.format(nickname, network) logging.info('New notification [%s | %s | %s]', network, channel, repr(str(nickname))) logging.debug('Application", "stopped to # work when I hooked `sys.excepthook`. 
I have tried to unhook", "exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook = handle_exception def server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER),", "sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook = handle_exception def server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True)", "DBus interface it seems to hang # on exit. Only other workaround I", "%s]', network, channel, repr(str(nickname))) logging.debug('Application details: [%s | %s]', win_status, active_channel) logging.debug('Message type:", "Server through DBus') interface.create_notification(nickname, network, channel, title, text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus message", "message from: {} ({})'.format(nickname, network) logging.info('New notification [%s | %s | %s]', network,", "\"%s\"', message_type) logging.debug('Message: %s', repr(text)) # Ignore notification if window is active and", "network, channel, title, text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus message to Notification Server fail')", "== 'active' and channel == active_channel: logging.info('Not showing notifications as channel is already", "= dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return interface except dbus.exceptions.DBusException: logging.debug('DBus interface Fail')", "interface Fail') server_start() return None def on_focus_tab(word, word_eol, userdata): global active_channel active_channel =", "None def on_focus_tab(word, word_eol, userdata): global active_channel active_channel = hexchat.get_info('channel') logging.info('Changed active tab", "import logging import re import subprocess import sys from os import path import", "active_channel) logging.debug('Message type: \"%s\"', message_type) logging.debug('Message: %s', 
repr(text)) # Ignore notification if window", "try: logging.info('Sending message to Notification Server through DBus') interface.create_notification(nickname, network, channel, title, text,", "logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type,", "on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message", "Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to Dialog', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Action to Dialog',", "from os import path import dbus import hexchat __module_name__ = 'highlights_notifications' __module_description__ =", "logging.info('HexChat notification plugin starting ==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications", "session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus interface Success') return interface except dbus.exceptions.DBusException:", "message to Notification Server through DBus') interface.create_notification(nickname, network, channel, title, text, message_type) except", "None logging.info('HexChat notification plugin starting ==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common", "logging.info('Sending Quit message to Notification Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to", 
"hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to Dialog', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Action to", "network, channel, repr(str(nickname))) logging.debug('Application details: [%s | %s]', win_status, active_channel) logging.debug('Message type: \"%s\"',", "| %s]', win_status, active_channel) logging.debug('Message type: \"%s\"', message_type) logging.debug('Message: %s', repr(text)) # Ignore", "Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to Notification Server failed') logging.info('Explicitly quit')", "hexchat.get_info('channel') nickname = word[0] nickname = re.sub(r'^\\x03\\d+', '', nickname) # Remove color text", "win_status == 'active' and channel == active_channel: logging.info('Not showing notifications as channel is", "subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting DBus interface for Notification Server') try: session_bus", "FORMAT = '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught", "logging.info('New notification [%s | %s | %s]', network, channel, repr(str(nickname))) logging.debug('Application details: [%s", "hang # on exit. Only other workaround I have found was to raise", "interface it seems to hang # on exit. 
Only other workaround I have", "import path import dbus import hexchat __module_name__ = 'highlights_notifications' __module_description__ = 'Better notifications", "Success') return interface except dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start() return None def on_focus_tab(word,", "= 'highlights_notifications' __module_description__ = 'Better notifications with actions' __module_version__ = '1.1' NOTIFICATION_SERVER =", "channel) else: title = 'Private message from: {} ({})'.format(nickname, network) logging.info('New notification [%s", "userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to", "active') return hexchat.EAT_NONE if interface is None: logging.debug('No DBus interface prepared') interface =", "tried to unhook it just before exit, but that did # not work", "logging.debug('Message: %s', repr(text)) # Ignore notification if window is active and active channel", "server ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to normal') hexchat.command('set", "%s]', win_status, active_channel) logging.debug('Message type: \"%s\"', message_type) logging.debug('Message: %s', repr(text)) # Ignore notification", "on exit. Only other workaround I have found was to raise an Exception,", "logging.debug('DBus interface Success') return interface except dbus.exceptions.DBusException: logging.debug('DBus interface Fail') server_start() return None", "= 'Private message from: {} ({})'.format(nickname, network) logging.info('New notification [%s | %s |", "raise an Exception, but that stopped to # work when I hooked `sys.excepthook`.", "before exit, but that did # not work either. I find the proper", "more useful than ability to restart # plugin. 
exit(1) active_channel = None win_status", "to hang # on exit. Only other workaround I have found was to", "| %s | %s]', network, channel, repr(str(nickname))) logging.debug('Application details: [%s | %s]', win_status,", "Server') try: session_bus = dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat')", "better way, as if the plugin used DBus interface it seems to hang", "hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message',", "{}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting DBus interface for Notification Server') try: session_bus =", "was to raise an Exception, but that stopped to # work when I", "repr(str(nickname))) logging.debug('Application details: [%s | %s]', win_status, active_channel) logging.debug('Message type: \"%s\"', message_type) logging.debug('Message:", "Only other workaround I have found was to raise an Exception, but that", "import re import subprocess import sys from os import path import dbus import", "logging more useful than ability to restart # plugin. exit(1) active_channel = None", "with actions. 
HexChat Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging", "hexchat.command('set input_balloon_priv 1') try: logging.info('Sending Quit message to Notification Server') interface.quit() except (AttributeError,", "{} ({})'.format(nickname, network) logging.info('New notification [%s | %s | %s]', network, channel, repr(str(nickname)))", "hexchat.get_info('network') channel = hexchat.get_info('channel') nickname = word[0] nickname = re.sub(r'^\\x03\\d+', '', nickname) #", "to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) else: try:", "notification plugin starting ==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to", "word[1] message_type = userdata if message_type == 'HLT': title = 'Highlighted message from:", "input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1') try: logging.info('Sending Quit message to Notification Server') interface.quit()", "logging.info('Sending message to Notification Server through DBus') interface.create_notification(nickname, network, channel, title, text, message_type)", "used DBus interface it seems to hang # on exit. Only other workaround", "userdata): global interface win_status = hexchat.get_info('win_status') network = hexchat.get_info('network') channel = hexchat.get_info('channel') nickname", "it seems to hang # on exit. Only other workaround I have found", "\"\"\" Plugin for better notifications with actions. 
HexChat Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String", "Notification Server') try: session_bus = dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy,", "plugin cannot be restarted. # However, I did not find a better way,", "win_status = hexchat.get_info('win_status') network = hexchat.get_info('network') channel = hexchat.get_info('channel') nickname = word[0] nickname", "prepared') interface = get_dbus_interface() if interface is None: logging.warning('DBus connection to Notification Server", "find a better way, as if the plugin used DBus interface it seems", "\"\"\" import logging import re import subprocess import sys from os import path", "'Private message from: {} ({})'.format(nickname, network) logging.info('New notification [%s | %s | %s]',", "that stopped to # work when I hooked `sys.excepthook`. I have tried to", "color text = word[1] message_type = userdata if message_type == 'HLT': title =", "logging.info('Explicitly quit') # Unfortunately, this also kills whole HexChat, so the plugin cannot", "`sys.excepthook`. 
I have tried to unhook it just before exit, but that did", "userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to Dialog', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Action", "__module_version__)) logging.info('Setting common notifications to normal') hexchat.command('set input_balloon_hilight 1') hexchat.command('set input_balloon_priv 1') try:", "hexchat.EAT_NONE if interface is None: logging.debug('No DBus interface prepared') interface = get_dbus_interface() if", "notification server ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to normal')", "logging.debug('DBus interface Fail') server_start() return None def on_focus_tab(word, word_eol, userdata): global active_channel active_channel", "message to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) interface", "to Notification Server through DBus') interface.create_notification(nickname, network, channel, title, text, message_type) except dbus.exceptions.DBusException:", "Quit message to Notification Server') interface.quit() except (AttributeError, dbus.exceptions.DBusException): logging.warning('Quit message to Notification", "Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private", "have found was to raise an Exception, but that stopped to # work", "type: \"%s\"', message_type) logging.debug('Message: %s', repr(text)) # Ignore notification if window is active", "\"{}\" {}'.format(title, text)) else: try: logging.info('Sending message to Notification Server through DBus') interface.create_notification(nickname,", "better notifications with actions. 
HexChat Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\"", "hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification,", "active_channel = None win_status = None interface = None logging.info('HexChat notification plugin starting", "try: session_bus = dbus.SessionBus() proxy = session_bus.get_object('com.skontar.HexChat', '/com/skontar/HexChat') interface = dbus.Interface(proxy, dbus_interface='com.skontar.HexChat') logging.debug('DBus", "global interface win_status = hexchat.get_info('win_status') network = hexchat.get_info('network') channel = hexchat.get_info('channel') nickname =", "message_type) except dbus.exceptions.DBusException: logging.warning('DBus message to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b", "also kills whole HexChat, so the plugin cannot be restarted. # However, I", "already active') return hexchat.EAT_NONE if interface is None: logging.debug('No DBus interface prepared') interface", "text)) interface = get_dbus_interface() return hexchat.EAT_NONE def on_unload(userdata): global interface logging.info('HexChat notification server", "actions. 
HexChat Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting \"\"\" import logging import", "Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) else: try: logging.info('Sending", "I have found was to raise an Exception, but that stopped to #", "def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.__excepthook__(exc_type, exc_value, exc_traceback) sys.excepthook", "fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) else: try: logging.info('Sending message to", "is None: logging.debug('No DBus interface prepared') interface = get_dbus_interface() if interface is None:", "hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) else: try: logging.info('Sending message to Notification Server through", "a better way, as if the plugin used DBus interface it seems to", "have tried to unhook it just before exit, but that did # not", "'active' and channel == active_channel: logging.info('Not showing notifications as channel is already active')", "on_focus_tab) hexchat.hook_unload(on_unload) hexchat.hook_print('Channel Action Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private", "for better notifications with actions. HexChat Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting: https://github.com/myano/jenni/wiki/IRC-String-Formatting", "= userdata if message_type == 'HLT': title = 'Highlighted message from: {} ({})'.format(nickname,", "to restart # plugin. exit(1) active_channel = None win_status = None interface =", "Plugin for better notifications with actions. 
HexChat Python Interface: http://hexchat.readthedocs.io/en/latest/script_python.html IRC String Formatting:", "is None: logging.warning('DBus connection to Notification Server fail') logging.warning('Notification fallback') hexchat.command('TRAY -b \"{}\"", "on_unload(userdata): global interface logging.info('HexChat notification server ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting", "I have tried to unhook it just before exit, but that did #", "hexchat.hook_print('Channel Msg Hilight', on_highlight_notification, userdata='HLT') hexchat.hook_print('Private Message', on_highlight_notification, userdata='PVT') hexchat.hook_print('Private Message to Dialog',", "ending') hexchat.prnt('Unloading {}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to normal') hexchat.command('set input_balloon_hilight", "= '%(asctime)-24s %(levelname)-9s %(message)s' logging.basicConfig(filename=path.expanduser(LOG), format=FORMAT, level=logging.DEBUG) def handle_exception(exc_type, exc_value, exc_traceback): logging.error('Uncaught exception',", "{} ({})'.format(nickname, channel) else: title = 'Private message from: {} ({})'.format(nickname, network) logging.info('New", "Notification Server failed') logging.info('Explicitly quit') # Unfortunately, this also kills whole HexChat, so", "I did not find a better way, as if the plugin used DBus", "fallback') hexchat.command('TRAY -b \"{}\" {}'.format(title, text)) else: try: logging.info('Sending message to Notification Server", "kills whole HexChat, so the plugin cannot be restarted. 
# However, I did", "==============================') server_start() hexchat.prnt('{}, version {}'.format(__module_name__, __module_version__)) logging.info('Setting common notifications to suspended') hexchat.command('set input_balloon_hilight", "def server_start(): logging.info('Starting server') subprocess.Popen('python3 {}'.format(NOTIFICATION_SERVER), shell=True) def get_dbus_interface(): logging.info('Getting DBus interface for", "text)) else: try: logging.info('Sending message to Notification Server through DBus') interface.create_notification(nickname, network, channel,", "title, text, message_type) except dbus.exceptions.DBusException: logging.warning('DBus message to Notification Server fail') logging.warning('Notification fallback')", "to unhook it just before exit, but that did # not work either." ]
[]
[ "= 'gzip' elif hints.compression == 'BZIP': pandas_options['compression'] = 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints)", "valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints)", "== 'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] =", "specify things # without a timezone delimiter just fine. # # Unfortunately Python/Pandas", "being moved is affected by whatever limitation... if (hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\",", "# which is the pandas default. pass elif hints.compression == 'GZIP': pandas_options['compression'] =", "Boo. # # $ python3 # >>> import pytz # >>> us_eastern =", "hints.datetimeformattz == hints.datetimeformat: # BigQuery requires that timezone offsets have a colon; #", "= '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'DD-MM-YYYY': if", "Python (and thus Pandas) doesn't support adding the # colon with strftime. However,", "and time together :( # # might be nice someday to only emit", "34, 56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012' # >>> # #", "== 'BZIP': pandas_options['compression'] = 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting", "pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format']", "moved is affected by whatever limitation... 
if (hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat}", "logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) -> Dict[str, object]:", "csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar", "hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else:", "= hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is None: if hints.datetimeformattz == hints.datetimeformat: #", "hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is None: if", "# is UTC format. Boo. # # $ python3 # >>> import pytz", "is the pandas default. 
pass elif hints.compression == 'GZIP': pandas_options['compression'] = 'gzip' elif", "elif hints.compression == 'BZIP': pandas_options['compression'] = 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression')", "is None: # hints['compression']=None will output an uncompressed csv, # which is the", "== hints.datetimeformat: # BigQuery requires that timezone offsets have a colon; # Python", "# Python (and thus Pandas) doesn't support adding the # colon with strftime.", "csv.QUOTE_NONE elif hints.quoting == 'all': pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting == 'minimal': pandas_options['quoting']", "elif hints.quoting == 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] =", "..processing_instructions import ProcessingInstructions from ..records_format import DelimitedRecordsFormat from records_mover.mover_types import _assert_never import logging", "in # is UTC format. Boo. # # $ python3 # >>> import", "delimiter just fine. # # Unfortunately Python/Pandas will drop the timezone info #", "format. Boo. 
# # $ python3 # >>> import pytz # >>> us_eastern", "cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints, 'field-delimiter') pandas_options['line_terminator'] = hints.record_terminator", "%H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') #", "Set, Dict logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) ->", ">>> import datetime # >>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34, 56, 789012)) #", "'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat')", "hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat',", "# Unfortunately Python/Pandas will drop the timezone info # instead of converting the", "pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY':", "# https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] =", "56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012' # >>> # # https://github.com/bluelabsio/records-mover/issues/95", "hints) 
quiet_remove(unhandled_hints, 'dateformat') # pandas can't seem to export a date and time", "support adding the # colon with strftime. However, we can specify things #", "import datetime # >>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34, 56, 789012)) # .strftime('%Y-%m-%d", "# hints['compression']=None will output an uncompressed csv, # which is the pandas default.", "'%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz", "elif hints.quoting == 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric': pandas_options['quoting'] =", "== 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else:", "is UTC format. Boo. # # $ python3 # >>> import pytz #", "is None: if hints.datetimeformattz == hints.datetimeformat: # BigQuery requires that timezone offsets have", "= '%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] =", "_assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar')", "# might be nice someday to only emit the errors if the actual", "hints.quoting is None: pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting == 'all': pandas_options['quoting'] = csv.QUOTE_ALL", "HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat", "= hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression is None: # hints['compression']=None will 
output an", "'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting)", "pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints,", "if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z'", "validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] = {} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints,", "hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints, 'field-delimiter')", "quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is None: if hints.datetimeformattz == hints.datetimeformat: # BigQuery requires", "quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is None: if hints.datetimeformattz", "2, 12, 34, 56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012' # >>>", "else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat:", "%H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f'", "%H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y 
%H:%M:%S.%f'", "'%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz", "$ python3 # >>> import pytz # >>> us_eastern = pytz.timezone('US/Eastern') # >>>", "hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format']", "elif hints.quoting == 'all': pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting == 'minimal': pandas_options['quoting'] =", "'%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat')", "pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape is None: pass else: pandas_options['escapechar'] =", "pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') # pandas can't", "logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) -> Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html", "quiet_remove from ..delimited import cant_handle_hint from ..processing_instructions import ProcessingInstructions from ..records_format import DelimitedRecordsFormat", "HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz',", "import ProcessingInstructions from ..records_format import DelimitedRecordsFormat from records_mover.mover_types import _assert_never import logging from", "elif hints.dateformat == 
'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else:", "the time, as BigQuery assumes what it gets in # is UTC format.", "'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\",", "elif hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else:", "together :( # # might be nice someday to only emit the errors", "python3 # >>> import pytz # >>> us_eastern = pytz.timezone('US/Eastern') # >>> import", "HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat',", "None: # hints['compression']=None will output an uncompressed csv, # which is the pandas", "pandas_options['compression'] = 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting is None:", "can specify things # without a timezone delimiter just fine. # # Unfortunately", "UTC format. Boo. 
# # $ python3 # >>> import pytz # >>>", "f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints)", "%H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') # pandas can't seem to export", "'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints, 'field-delimiter') pandas_options['line_terminator'] = hints.record_terminator quiet_remove(unhandled_hints,", "'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y", "] if (hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat", "HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\",", "HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\",", "colon; # Python (and thus Pandas) doesn't support adding the # colon with", "# # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif", "'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints, 'field-delimiter') pandas_options['line_terminator'] = hints.record_terminator quiet_remove(unhandled_hints, 'record-terminator') return pandas_options", 
"unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) -> Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint)", "== 'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] =", "%H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz ==", "= logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) -> Dict[str, object]: #", "pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) -> Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints =", "by whatever limitation... if (hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\",", "elif hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else:", "actual data # being moved is affected by whatever limitation... if (hints.datetimeformattz not", "# pandas can't seem to export a date and time together :( #", "datetime # >>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34, 56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f')", "without a timezone delimiter just fine. # # Unfortunately Python/Pandas will drop the", "typing import Set, Dict logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions:", "converting the timestamp to UTC. 
This # corrupts the time, as BigQuery assumes", "'2000-01-02 12:34:56.789012' # >>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format']", "'%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y", "drop the timezone info # instead of converting the timestamp to UTC. This", "= '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints,", "output an uncompressed csv, # which is the pandas default. pass elif hints.compression", "from ..processing_instructions import ProcessingInstructions from ..records_format import DelimitedRecordsFormat from records_mover.mover_types import _assert_never import", "not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint,", "else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat", "= {} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression is None: # hints['compression']=None", "'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote')", "HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\",", "from 
records_mover.mover_types import _assert_never import logging from typing import Set, Dict logger =", "cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting is None: pandas_options['quoting'] = csv.QUOTE_NONE elif", "cant_handle_hint from ..processing_instructions import ProcessingInstructions from ..records_format import DelimitedRecordsFormat from records_mover.mover_types import _assert_never", "else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') # pandas", "corrupts the time, as BigQuery assumes what it gets in # is UTC", "object] = {} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression is None: #", "%H:%M:%S.%f%z' elif hints.dateformat == 'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f'", "UTC. 
This # corrupts the time, as BigQuery assumes what it gets in", "pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting == 'all': pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting ==", "Set[str], processing_instructions: ProcessingInstructions) -> Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint", "Python/Pandas will drop the timezone info # instead of converting the timestamp to", "# corrupts the time, as BigQuery assumes what it gets in # is", "..records_format import DelimitedRecordsFormat from records_mover.mover_types import _assert_never import logging from typing import Set,", "= records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] = {} pandas_options['encoding'] =", "pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is None: if hints.datetimeformattz == hints.datetimeformat:", "seem to export a date and time together :( # # might be", "import pytz # >>> us_eastern = pytz.timezone('US/Eastern') # >>> import datetime # >>>", "assumes what it gets in # is UTC format. Boo. 
# # $", "if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z'", "if hints.quoting is None: pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting == 'all': pandas_options['quoting'] =", "else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat:", "'compression') if hints.quoting is None: pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting == 'all': pandas_options['quoting']", "hints.quoting == 'all': pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting == 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL", "HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [", "timezone info # instead of converting the timestamp to UTC. This # corrupts", "# >>> import datetime # >>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34, 56, 789012))", "doesn't support adding the # colon with strftime. However, we can specify things", "a colon; # Python (and thus Pandas) doesn't support adding the # colon", "pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints)", "things # without a timezone delimiter just fine. # # Unfortunately Python/Pandas will", "the pandas default. 
pass elif hints.compression == 'GZIP': pandas_options['compression'] = 'gzip' elif hints.compression", "This # corrupts the time, as BigQuery assumes what it gets in #", "'%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') # pandas can't seem to", "processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] = {} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression", "'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape", "-> Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options:", "hints.escape is None: pass else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row", "12:34:56.789012' # >>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] =", "'%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y", "else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') # pandas can't seem to export a", "if (hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat !=", "%H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif 
hints.dateformat == 'DD-MM-YYYY': if hints.datetimeformattz ==", "object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object]", "f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat}", "pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape is", "HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints,", "with strftime. 
However, we can specify things # without a timezone delimiter just", "elif hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else:", "else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints,", "might be nice someday to only emit the errors if the actual data", "# >>> import pytz # >>> us_eastern = pytz.timezone('US/Eastern') # >>> import datetime", "'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape is None: pass else: pandas_options['escapechar']", "hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format']", "'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints, 'field-delimiter') pandas_options['line_terminator'] =", "hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape is None: pass", "'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is None: if hints.datetimeformattz ==", "offsets have a colon; # Python (and thus Pandas) doesn't support adding the", "pandas_options: Dict[str, object] = {} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression is", "# colon with strftime. 
However, we can specify things # without a timezone", "import _assert_never import logging from typing import Set, Dict logger = logging.getLogger(__name__) def", "f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat')", "the timezone info # instead of converting the timestamp to UTC. This #", "f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat}", "what it gets in # is UTC format. Boo. # # $ python3", "a timezone delimiter just fine. # # Unfortunately Python/Pandas will drop the timezone", "'all': pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting == 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting", "# '2000-01-02 12:34:56.789012' # >>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else:", "= '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] =", "'compression', hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting is None: pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting", "= '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') # pandas can't seem", "export a date and time together :( # # might be nice someday", "# >>> us_eastern = pytz.timezone('US/Eastern') # >>> import datetime # >>> us_eastern.localize(datetime.datetime(2000, 1,", "is affected by whatever limitation... if (hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\",", "whatever limitation... 
if (hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat}", "(f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat}", "timezone offsets have a colon; # Python (and thus Pandas) doesn't support adding", "'%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y", "quiet_remove(unhandled_hints, 'dateformat') # pandas can't seem to export a date and time together", "else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat:", "hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] = {} pandas_options['encoding']", ".strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012' # >>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d", "= 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting is None: pandas_options['quoting']", "'header-row') if hints.dateformat is None: if hints.datetimeformattz == hints.datetimeformat: # BigQuery requires that", ">>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z'", "DelimitedRecordsFormat from records_mover.mover_types import _assert_never import logging from typing import Set, Dict logger", "= csv.QUOTE_NONNUMERIC else: 
_assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] =", "HH:MI\", ] if (hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if", "hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat ==", "valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if", "12, 34, 56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012' # >>> #", "= processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] = {} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding') if", "hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif", "However, we can specify things # without a timezone delimiter just fine. #", "HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat not in valid_datetimeformat):", "to UTC. 
This # corrupts the time, as BigQuery assumes what it gets", "= csv.QUOTE_ALL elif hints.quoting == 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric':", "'%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'DD-MM-YYYY': if hints.datetimeformattz", "cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints,", "info # instead of converting the timestamp to UTC. This # corrupts the", "== hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat", "import quiet_remove from ..delimited import cant_handle_hint from ..processing_instructions import ProcessingInstructions from ..records_format import", "hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep']", "== 'GZIP': pandas_options['compression'] = 'gzip' elif hints.compression == 'BZIP': pandas_options['compression'] = 'bz2' else:", "to export a date and time together :( # # might be nice", "nice someday to only emit the errors if the actual data # being", "Dict logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) -> Dict[str,", "is None: pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting == 'all': pandas_options['quoting'] = csv.QUOTE_ALL elif", "we can specify things # without a timezone delimiter just fine. 
# #", "= hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape is None:", "default. pass elif hints.compression == 'GZIP': pandas_options['compression'] = 'gzip' elif hints.compression == 'BZIP':", "f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat}", "HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")):", ">>> us_eastern = pytz.timezone('US/Eastern') # >>> import datetime # >>> us_eastern.localize(datetime.datetime(2000, 1, 2,", ">>> import pytz # >>> us_eastern = pytz.timezone('US/Eastern') # >>> import datetime #", "of converting the timestamp to UTC. 
This # corrupts the time, as BigQuery", "# >>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d", "the errors if the actual data # being moved is affected by whatever", "# $ python3 # >>> import pytz # >>> us_eastern = pytz.timezone('US/Eastern') #", "hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints, 'field-delimiter') pandas_options['line_terminator'] = hints.record_terminator quiet_remove(unhandled_hints, 'record-terminator')", "hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression is None: # hints['compression']=None will output an uncompressed", "f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat not in", "import csv from ...utils import quiet_remove from ..delimited import cant_handle_hint from ..processing_instructions import", "{} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression is None: # hints['compression']=None will", "quiet_remove(unhandled_hints, 'quotechar') if hints.escape is None: pass else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape')", "pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'DD-MM-YYYY':", "if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints,", "it gets in # is UTC format. Boo. # # $ python3 #", "timezone delimiter just fine. # # Unfortunately Python/Pandas will drop the timezone info", "instead of converting the timestamp to UTC. 
This # corrupts the time, as", "be nice someday to only emit the errors if the actual data #", "someday to only emit the errors if the actual data # being moved", "# being moved is affected by whatever limitation... if (hints.datetimeformattz not in (f\"{hints.dateformat}", "hints.compression == 'BZIP': pandas_options['compression'] = 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression') if", "hints['compression']=None will output an uncompressed csv, # which is the pandas default. pass", "pass else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row') if", "(hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\",", "'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ]", "which is the pandas default. pass elif hints.compression == 'GZIP': pandas_options['compression'] = 'gzip'", "# without a timezone delimiter just fine. # # Unfortunately Python/Pandas will drop", "# https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat", "quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\",", "pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format']", "the # colon with strftime. 
However, we can specify things # without a", "the actual data # being moved is affected by whatever limitation... if (hints.datetimeformattz", "pandas can't seem to export a date and time together :( # #", "pytz # >>> us_eastern = pytz.timezone('US/Eastern') # >>> import datetime # >>> us_eastern.localize(datetime.datetime(2000,", "not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat}", "= '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] =", "from ..delimited import cant_handle_hint from ..processing_instructions import ProcessingInstructions from ..records_format import DelimitedRecordsFormat from", "us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34, 56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012'", "cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat}", "'dateformat') # pandas can't seem to export a date and time together :(", "'%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz", "errors if the actual data # being moved is affected by whatever limitation...", "..delimited import cant_handle_hint from ..processing_instructions import ProcessingInstructions from ..records_format import DelimitedRecordsFormat from records_mover.mover_types", "f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint,", "processing_instructions: ProcessingInstructions) -> Dict[str, 
object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint =", "time together :( # # might be nice someday to only emit the", "import logging from typing import Set, Dict logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat,", "quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] =", "# instead of converting the timestamp to UTC. This # corrupts the time,", "= csv.QUOTE_NONE elif hints.quoting == 'all': pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting == 'minimal':", "if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z'", ">>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34, 56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02", "will output an uncompressed csv, # which is the pandas default. 
pass elif", "if hints.compression is None: # hints['compression']=None will output an uncompressed csv, # which", "# .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012' # >>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] =", "csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote']", "hints.dateformat is None: if hints.datetimeformattz == hints.datetimeformat: # BigQuery requires that timezone offsets", "csv.QUOTE_ALL elif hints.quoting == 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric': pandas_options['quoting']", "is None: pass else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints,", "limitation... if (hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\",", "= '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY': if", "'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter", "ProcessingInstructions from ..records_format import DelimitedRecordsFormat from records_mover.mover_types import _assert_never import logging from typing", "csv from ...utils import quiet_remove from ..delimited import cant_handle_hint from ..processing_instructions import ProcessingInstructions", "'gzip' elif hints.compression == 'BZIP': pandas_options['compression'] = 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints,", 
"pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY':", "f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint,", "_assert_never import logging from typing import Set, Dict logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format:", "quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints, 'field-delimiter') pandas_options['line_terminator'] = hints.record_terminator quiet_remove(unhandled_hints, 'record-terminator') return", "DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) -> Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\", "Pandas) doesn't support adding the # colon with strftime. However, we can specify", "pandas default. pass elif hints.compression == 'GZIP': pandas_options['compression'] = 'gzip' elif hints.compression ==", "from ..records_format import DelimitedRecordsFormat from records_mover.mover_types import _assert_never import logging from typing import", "pass elif hints.compression == 'GZIP': pandas_options['compression'] = 'gzip' elif hints.compression == 'BZIP': pandas_options['compression']", "quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape is None: pass else:", "adding the # colon with strftime. 
However, we can specify things # without", "elif hints.compression == 'GZIP': pandas_options['compression'] = 'gzip' elif hints.compression == 'BZIP': pandas_options['compression'] =", "'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y", "colon with strftime. However, we can specify things # without a timezone delimiter", "records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] = {} pandas_options['encoding'] = hints.encoding", "pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is", "= '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] =", "Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str,", "# >>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34, 56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') #", "if hints.escape is None: pass else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] =", "Unfortunately Python/Pandas will drop the timezone info # instead of converting the timestamp", ":( # # might be nice someday to only emit the errors if", "[ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat not", "= [ f\"{hints.dateformat} 
HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat} HH:MI\", ] if (hints.datetimeformat", "to only emit the errors if the actual data # being moved is", "hints.datetimeformat: # BigQuery requires that timezone offsets have a colon; # Python (and", "https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat ==", "'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting is None: pandas_options['quoting'] =", "789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012' # >>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format']", "quiet_remove(unhandled_hints, 'compression') if hints.quoting is None: pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting == 'all':", "'%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d", "just fine. 
# # Unfortunately Python/Pandas will drop the timezone info # instead", "'GZIP': pandas_options['compression'] = 'gzip' elif hints.compression == 'BZIP': pandas_options['compression'] = 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint,", "# # $ python3 # >>> import pytz # >>> us_eastern = pytz.timezone('US/Eastern')", "(hints.datetimeformat not in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS':", "in valid_datetimeformat): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformat', hints) quiet_remove(unhandled_hints, 'datetimeformat') if hints.timeonlyformat != 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat',", "hints.compression is None: # hints['compression']=None will output an uncompressed csv, # which is", "that timezone offsets have a colon; # Python (and thus Pandas) doesn't support", "time, as BigQuery assumes what it gets in # is UTC format. Boo.", "1, 2, 12, 34, 56, 789012)) # .strftime('%Y-%m-%d %H:%M:%S.%f') # '2000-01-02 12:34:56.789012' #", "hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting is None: pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting ==", "'BZIP': pandas_options['compression'] = 'bz2' else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting is", "BigQuery assumes what it gets in # is UTC format. Boo. 
# #", "from ...utils import quiet_remove from ..delimited import cant_handle_hint from ..processing_instructions import ProcessingInstructions from", "hints.compression == 'GZIP': pandas_options['compression'] = 'gzip' elif hints.compression == 'BZIP': pandas_options['compression'] = 'bz2'", "a date and time together :( # # might be nice someday to", "else: cant_handle_hint(fail_if_cant_handle_hint, 'compression', hints) quiet_remove(unhandled_hints, 'compression') if hints.quoting is None: pandas_options['quoting'] = csv.QUOTE_NONE", "hints.dateformat == 'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format']", "...utils import quiet_remove from ..delimited import cant_handle_hint from ..processing_instructions import ProcessingInstructions from ..records_format", "%H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz ==", "hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif", "== 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints,", "records_mover.mover_types import _assert_never import logging from typing import Set, Dict logger = logging.getLogger(__name__)", "if hints.datetimeformattz == hints.datetimeformat: # BigQuery requires that timezone offsets have a colon;", "pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'DD-MM-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format']", "pandas_options['compression'] = 'gzip' elif hints.compression == 'BZIP': pandas_options['compression'] = 'bz2' else: 
cant_handle_hint(fail_if_cant_handle_hint, 'compression',", "affected by whatever limitation... if (hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat}", "fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] = {} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding')", "= hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape is None: pass else: pandas_options['escapechar'] = hints.escape", "in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\",", "def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions) -> Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints", "fine. # # Unfortunately Python/Pandas will drop the timezone info # instead of", "an uncompressed csv, # which is the pandas default. 
pass elif hints.compression ==", "pytz.timezone('US/Eastern') # >>> import datetime # >>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34, 56,", "# BigQuery requires that timezone offsets have a colon; # Python (and thus", "== hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat", "hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z' elif hints.dateformat ==", "emit the errors if the actual data # being moved is affected by", "hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if hints.escape is None: pass else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints,", "!= 'HH24:MI:SS': cant_handle_hint(fail_if_cant_handle_hint, 'timeonlyformat', hints) quiet_remove(unhandled_hints, 'timeonlyformat') pandas_options['sep'] = hints.field_delimiter quiet_remove(unhandled_hints, 'field-delimiter') pandas_options['line_terminator']", "= hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is None:", "hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat ==", "None: if hints.datetimeformattz == hints.datetimeformat: # BigQuery requires that timezone offsets have a", "quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar'] = hints.quotechar quiet_remove(unhandled_hints, 'quotechar') if", "as BigQuery assumes what it gets in # is UTC format. Boo. 
#", "if (hints.datetimeformattz not in (f\"{hints.dateformat} HH24:MI:SSOF\", f\"{hints.dateformat} HH:MI:SSOF\", f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat}", "== 'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] =", "pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote quiet_remove(unhandled_hints, 'doublequote') pandas_options['quotechar']", "HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints,", "Dict[str, object] = {} pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression is None:", "pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting == 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting ==", "uncompressed csv, # which is the pandas default. 
pass elif hints.compression == 'GZIP':", "'quotechar') if hints.escape is None: pass else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header']", "cant_handle_hint(fail_if_cant_handle_hint, 'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') # pandas can't seem to export a date", "# # Unfortunately Python/Pandas will drop the timezone info # instead of converting", "%H:%M:%S.%f') # '2000-01-02 12:34:56.789012' # >>> # # https://github.com/bluelabsio/records-mover/issues/95 pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f'", "hints.quoting == 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting') pandas_options['doublequote'] = hints.doublequote", "= '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif hints.dateformat == 'MM/DD/YY': if", "gets in # is UTC format. Boo. # # $ python3 # >>>", "date and time together :( # # might be nice someday to only", "import Set, Dict logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str], processing_instructions: ProcessingInstructions)", "only emit the errors if the actual data # being moved is affected", "quiet_remove(unhandled_hints, 'encoding') if hints.compression is None: # hints['compression']=None will output an uncompressed csv,", "hints.header_row quiet_remove(unhandled_hints, 'header-row') if hints.dateformat is None: if hints.datetimeformattz == hints.datetimeformat: # BigQuery", "the timestamp to UTC. 
This # corrupts the time, as BigQuery assumes what", "'dateformat', hints) quiet_remove(unhandled_hints, 'dateformat') # pandas can't seem to export a date and", "import cant_handle_hint from ..processing_instructions import ProcessingInstructions from ..records_format import DelimitedRecordsFormat from records_mover.mover_types import", "can't seem to export a date and time together :( # # might", "= csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC else: _assert_never(hints.quoting) quiet_remove(unhandled_hints, 'quoting')", "= '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD': if", "%H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz ==", "== 'all': pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting == 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif", "%H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f'", "timestamp to UTC. 
This # corrupts the time, as BigQuery assumes what it", "f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz')", "f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat =", "have a colon; # Python (and thus Pandas) doesn't support adding the #", "if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f%z'", "# # might be nice someday to only emit the errors if the", "== hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f%z' else: cant_handle_hint(fail_if_cant_handle_hint,", "== hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat", "if the actual data # being moved is affected by whatever limitation... if", "ProcessingInstructions) -> Dict[str, object]: # https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint", "https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html hints = records_format.\\ validate(fail_if_cant_handle_hint=processing_instructions.fail_if_cant_handle_hint) fail_if_cant_handle_hint = processing_instructions.fail_if_cant_handle_hint pandas_options: Dict[str, object] = {}", "csv, # which is the pandas default. 
pass elif hints.compression == 'GZIP': pandas_options['compression']", "'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d", "(and thus Pandas) doesn't support adding the # colon with strftime. However, we", "will drop the timezone info # instead of converting the timestamp to UTC.", "'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format'] = '%m/%d/%y", "requires that timezone offsets have a colon; # Python (and thus Pandas) doesn't", "pandas_options['encoding'] = hints.encoding quiet_remove(unhandled_hints, 'encoding') if hints.compression is None: # hints['compression']=None will output", "logging from typing import Set, Dict logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints:", "hints.dateformat == 'MM/DD/YY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m/%d/%y %H:%M:%S.%f' else: pandas_options['date_format']", "strftime. 
However, we can specify things # without a timezone delimiter just fine.", "== 'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%m-%d-%Y %H:%M:%S.%f' else: pandas_options['date_format'] =", "None: pandas_options['quoting'] = csv.QUOTE_NONE elif hints.quoting == 'all': pandas_options['quoting'] = csv.QUOTE_ALL elif hints.quoting", "pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD': if hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format']", "from typing import Set, Dict logger = logging.getLogger(__name__) def pandas_to_csv_options(records_format: DelimitedRecordsFormat, unhandled_hints: Set[str],", "= pytz.timezone('US/Eastern') # >>> import datetime # >>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12, 34,", "hints.datetimeformattz == hints.datetimeformat: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f' else: pandas_options['date_format'] = '%d-%m-%Y %H:%M:%S.%f%z' elif", "pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f' else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'YYYY-MM-DD':", "hints) quiet_remove(unhandled_hints, 'datetimeformattz') valid_datetimeformat = [ f\"{hints.dateformat} HH24:MI:SS\", f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH24:MI\", f\"{hints.dateformat}", "import DelimitedRecordsFormat from records_mover.mover_types import _assert_never import logging from typing import Set, Dict", "us_eastern = pytz.timezone('US/Eastern') # >>> import datetime # >>> us_eastern.localize(datetime.datetime(2000, 1, 2, 12,", "None: pass else: pandas_options['escapechar'] = hints.escape quiet_remove(unhandled_hints, 'escape') pandas_options['header'] = hints.header_row quiet_remove(unhandled_hints, 'header-row')", "BigQuery requires that timezone offsets have a colon; # Python (and thus Pandas)", "data # being moved is affected by whatever limitation... 
if (hints.datetimeformattz not in", "else: pandas_options['date_format'] = '%Y-%m-%d %H:%M:%S.%f%z' elif hints.dateformat == 'MM-DD-YYYY': if hints.datetimeformattz == hints.datetimeformat:", "'encoding') if hints.compression is None: # hints['compression']=None will output an uncompressed csv, #", "hints.quoting == 'minimal': pandas_options['quoting'] = csv.QUOTE_MINIMAL elif hints.quoting == 'nonnumeric': pandas_options['quoting'] = csv.QUOTE_NONNUMERIC", "if hints.dateformat is None: if hints.datetimeformattz == hints.datetimeformat: # BigQuery requires that timezone", "f\"{hints.dateformat} HH:MI:SS\", f\"{hints.dateformat} HH:MIOF\", f\"{hints.dateformat} HH:MI\", f\"{hints.dateformat} HH24:MIOF\", f\"{hints.dateformat} HH24:MI\")): cant_handle_hint(fail_if_cant_handle_hint, 'datetimeformattz', hints)", "thus Pandas) doesn't support adding the # colon with strftime. However, we can" ]
[ "import ensure_neighbour_removed_from_config_by_host from tests.testcase import TestCase class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key = self.set_up_patch(", "from raptiformica.actions.prune import ensure_neighbour_removed_from_config_by_host from tests.testcase import TestCase class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key", "TestCase class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key' ) def test_ensure_neighbour_removed_from_config_by_host_deleted_neighbour_by_host(self): ensure_neighbour_removed_from_config_by_host('1.2.3.4')", "ensure_neighbour_removed_from_config_by_host from tests.testcase import TestCase class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key'", "import TestCase class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key' ) def test_ensure_neighbour_removed_from_config_by_host_deleted_neighbour_by_host(self):", "class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key' ) def test_ensure_neighbour_removed_from_config_by_host_deleted_neighbour_by_host(self): ensure_neighbour_removed_from_config_by_host('1.2.3.4') self._del_neighbour_by_key.assert_called_once_with(", "TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key' ) def test_ensure_neighbour_removed_from_config_by_host_deleted_neighbour_by_host(self): 
ensure_neighbour_removed_from_config_by_host('1.2.3.4') self._del_neighbour_by_key.assert_called_once_with( 'host',", "def setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key' ) def test_ensure_neighbour_removed_from_config_by_host_deleted_neighbour_by_host(self): ensure_neighbour_removed_from_config_by_host('1.2.3.4') self._del_neighbour_by_key.assert_called_once_with( 'host', '1.2.3.4'", "from tests.testcase import TestCase class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key' )", "raptiformica.actions.prune import ensure_neighbour_removed_from_config_by_host from tests.testcase import TestCase class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key =", "setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key' ) def test_ensure_neighbour_removed_from_config_by_host_deleted_neighbour_by_host(self): ensure_neighbour_removed_from_config_by_host('1.2.3.4') self._del_neighbour_by_key.assert_called_once_with( 'host', '1.2.3.4' )", "tests.testcase import TestCase class TestEnsureNeighbourRemovedFromConfigByHost(TestCase): def setUp(self): self._del_neighbour_by_key = self.set_up_patch( 'raptiformica.actions.prune._del_neighbour_by_key' ) def" ]
[ "angle depends on the lane direction def plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y,", "TeacherQuinticPolynomials(Agent): def learn(self, state, action): raise NotImplementedError() def explore(self, state, horizon=1): raise NotImplementedError()", "self.execute() def execute(self, action, horizon=1): for _ in range(horizon): self.x = self.x +", "self.goal = self.lane.end_middle() self.goal = self.goal[0], self.goal[1], 0.0 # the angle depends on", "explore(self, state, horizon=1): raise NotImplementedError() def __init__(self, world, lane): Agent.__init__(self, world) self.lane =", "in range(horizon): self.execute() def execute(self, action, horizon=1): for _ in range(horizon): self.x =", "from math import cos, sin import numpy as np from ....simulator import Agent", "def plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2],", "for _ in range(horizon): self.x = self.x + self.v * cos(action) self.y =", "import Agent from .quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self, state, action): raise", "from .quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self, state, action): raise NotImplementedError() def", "= quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1,", "self.lane.end_middle() self.goal = self.goal[0], self.goal[1], 0.0 # the angle depends on the lane", "self.goal[1], 0.0 # the angle depends on the lane direction def plan(self, horizon=10):", "exploit(self, state, horizon=1): if self.navigation_plan is None: self.navigation_plan = self.plan() for _ in", "is None: self.navigation_plan = self.plan() for _ in range(horizon): self.execute() 
def execute(self, action,", "__init__(self, world, lane): Agent.__init__(self, world) self.lane = lane self.navigation_plan = None self.goal =", "sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon] def", "execute(self, action, horizon=1): for _ in range(horizon): self.x = self.x + self.v *", "self.navigation_plan = self.plan() for _ in range(horizon): self.execute() def execute(self, action, horizon=1): for", "max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1): if self.navigation_plan is None: self.navigation_plan", "sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon] def exploit(self,", "action): raise NotImplementedError() def explore(self, state, horizon=1): raise NotImplementedError() def __init__(self, world, lane):", "if self.navigation_plan is None: self.navigation_plan = self.plan() for _ in range(horizon): self.execute() def", "range(horizon): self.execute() def execute(self, action, horizon=1): for _ in range(horizon): self.x = self.x", "def execute(self, action, horizon=1): for _ in range(horizon): self.x = self.x + self.v", "world) self.lane = lane self.navigation_plan = None self.goal = self.lane.end_middle() self.goal = self.goal[0],", "Agent from .quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self, state, action): raise NotImplementedError()", "self.navigation_plan is None: self.navigation_plan = self.plan() for _ in range(horizon): self.execute() def execute(self,", "action, horizon=1): for _ in range(horizon): self.x = self.x + self.v * cos(action)", "state, horizon=1): raise NotImplementedError() def __init__(self, world, lane): Agent.__init__(self, world) self.lane = lane", "cos, sin import numpy as np from ....simulator import 
Agent from .quintic_polynomials_planner import", "# the angle depends on the lane direction def plan(self, horizon=10): trajectory =", "range(horizon): self.x = self.x + self.v * cos(action) self.y = self.y + self.v", "= lane self.navigation_plan = None self.goal = self.lane.end_middle() self.goal = self.goal[0], self.goal[1], 0.0", "self.lane = lane self.navigation_plan = None self.goal = self.lane.end_middle() self.goal = self.goal[0], self.goal[1],", "horizon=1): for _ in range(horizon): self.x = self.x + self.v * cos(action) self.y", "....simulator import Agent from .quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self, state, action):", "state, action): raise NotImplementedError() def explore(self, state, horizon=1): raise NotImplementedError() def __init__(self, world,", "trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0,", "lane self.navigation_plan = None self.goal = self.lane.end_middle() self.goal = self.goal[0], self.goal[1], 0.0 #", "import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self, state, action): raise NotImplementedError() def explore(self, state,", "syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon]", "def __init__(self, world, lane): Agent.__init__(self, world) self.lane = lane self.navigation_plan = None self.goal", "class TeacherQuinticPolynomials(Agent): def learn(self, state, action): raise NotImplementedError() def explore(self, state, horizon=1): raise", "np from ....simulator import Agent from .quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self,", "state, horizon=1): if self.navigation_plan is None: 
self.navigation_plan = self.plan() for _ in range(horizon):", "quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1)", "world, lane): Agent.__init__(self, world) self.lane = lane self.navigation_plan = None self.goal = self.lane.end_middle()", "learn(self, state, action): raise NotImplementedError() def explore(self, state, horizon=1): raise NotImplementedError() def __init__(self,", "self.goal[0], self.goal[1], 0.0 # the angle depends on the lane direction def plan(self,", "None: self.navigation_plan = self.plan() for _ in range(horizon): self.execute() def execute(self, action, horizon=1):", "return np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1): if self.navigation_plan is None: self.navigation_plan = self.plan()", "= None self.goal = self.lane.end_middle() self.goal = self.goal[0], self.goal[1], 0.0 # the angle", "NotImplementedError() def explore(self, state, horizon=1): raise NotImplementedError() def __init__(self, world, lane): Agent.__init__(self, world)", "raise NotImplementedError() def __init__(self, world, lane): Agent.__init__(self, world) self.lane = lane self.navigation_plan =", "in range(horizon): self.x = self.x + self.v * cos(action) self.y = self.y +", "depends on the lane direction def plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta,", "numpy as np from ....simulator import Agent from .quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent):", "self.plan() for _ in range(horizon): self.execute() def execute(self, action, horizon=1): for _ in", "ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1): if self.navigation_plan is", "= self.plan() for _ in range(horizon): self.execute() def execute(self, action, horizon=1): for _", 
"def explore(self, state, horizon=1): raise NotImplementedError() def __init__(self, world, lane): Agent.__init__(self, world) self.lane", ".quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self, state, action): raise NotImplementedError() def explore(self,", "_ in range(horizon): self.execute() def execute(self, action, horizon=1): for _ in range(horizon): self.x", "lane direction def plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0],", "for _ in range(horizon): self.execute() def execute(self, action, horizon=1): for _ in range(horizon):", "raise NotImplementedError() def explore(self, state, horizon=1): raise NotImplementedError() def __init__(self, world, lane): Agent.__init__(self,", "quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self, state, action): raise NotImplementedError() def explore(self, state, horizon=1):", "import numpy as np from ....simulator import Agent from .quintic_polynomials_planner import quinic_polynomials_planner class", "gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1):", "def exploit(self, state, horizon=1): if self.navigation_plan is None: self.navigation_plan = self.plan() for _", "Agent.__init__(self, world) self.lane = lane self.navigation_plan = None self.goal = self.lane.end_middle() self.goal =", "horizon=1): if self.navigation_plan is None: self.navigation_plan = self.plan() for _ in range(horizon): self.execute()", "self.x = self.x + self.v * cos(action) self.y = self.y + self.v *", "the angle depends on the lane direction def plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x,", "gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return 
np.array(trajectory[3])[:horizon] def exploit(self, state,", "self.goal = self.goal[0], self.goal[1], 0.0 # the angle depends on the lane direction", "None self.goal = self.lane.end_middle() self.goal = self.goal[0], self.goal[1], 0.0 # the angle depends", "= self.lane.end_middle() self.goal = self.goal[0], self.goal[1], 0.0 # the angle depends on the", "= self.x + self.v * cos(action) self.y = self.y + self.v * sin(action)", "def learn(self, state, action): raise NotImplementedError() def explore(self, state, horizon=1): raise NotImplementedError() def", "= self.goal[0], self.goal[1], 0.0 # the angle depends on the lane direction def", "the lane direction def plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0,", "gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1): if", "horizon=1): raise NotImplementedError() def __init__(self, world, lane): Agent.__init__(self, world) self.lane = lane self.navigation_plan", "dt=1) return np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1): if self.navigation_plan is None: self.navigation_plan =", "0.0 # the angle depends on the lane direction def plan(self, horizon=10): trajectory", "np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1): if self.navigation_plan is None: self.navigation_plan = self.plan() for", "as np from ....simulator import Agent from .quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def", "on the lane direction def plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v,", "import cos, sin import numpy as np from ....simulator import Agent from .quintic_polynomials_planner", "max_accel=0.0, max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1): if 
self.navigation_plan is None:", "self.navigation_plan = None self.goal = self.lane.end_middle() self.goal = self.goal[0], self.goal[1], 0.0 # the", "gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return np.array(trajectory[3])[:horizon] def exploit(self, state, horizon=1): if self.navigation_plan", "lane): Agent.__init__(self, world) self.lane = lane self.navigation_plan = None self.goal = self.lane.end_middle() self.goal", "plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0,", "sin import numpy as np from ....simulator import Agent from .quintic_polynomials_planner import quinic_polynomials_planner", "from ....simulator import Agent from .quintic_polynomials_planner import quinic_polynomials_planner class TeacherQuinticPolynomials(Agent): def learn(self, state,", "math import cos, sin import numpy as np from ....simulator import Agent from", "sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0, max_accel=0.0, max_jerk=0.1, dt=1) return", "horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1], gyaw=self.goal[2], gv=0.0, ga=0.0,", "NotImplementedError() def __init__(self, world, lane): Agent.__init__(self, world) self.lane = lane self.navigation_plan = None", "direction def plan(self, horizon=10): trajectory = quinic_polynomials_planner(sx=self.x, sy=self.y, syaw=self.theta, sv=self.v, sa=0.0, gx=self.goal[0], gy=self.goal[1],", "_ in range(horizon): self.x = self.x + self.v * cos(action) self.y = self.y" ]
[ "type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int, default=1) args_opt = parser.parse_args() context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target,", "for k in aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"),", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "= x_query + ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query =", "enumerate(n_shots): np.random.seed(0) label_shot = label_list[i] for j in range(file_num): labels = Tensor(label_shot[j]) f", "np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape =", "args.num_shots) aves_keys = ['tl', 'ta', 'vl', 'va'] for n_shot in n_shots: aves_keys +=", "choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320)", "+ \"_\" + str(j) + \"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query", "np import mindspore as ms from mindspore import ops, Tensor, context import src.util", "this file except in compliance with the License. 
# You may obtain a", "x_shot = x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape, -1) ########## cross-class bias ############ bs", "parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int, default=1) args_opt =", "fs).mean(1) - x_query.mean(1) x_query = x_query + ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2) x_shot", "os import argparse from functools import reduce import numpy as np import mindspore", "Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0", "argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str,", "ANY KIND, either express or implied. # See the License for the specific", "x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1) x_query = x_query + ops.ExpandDims()(bias, 1) x_shot =", "= int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys = ['tl', 'ta', 'vl', 'va'] for n_shot in", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "\"\"\" postprocess \"\"\" import os import argparse from functools import reduce import numpy", "context import src.util as util def cal_acc(args): \"\"\" :return: meta-baseline eval \"\"\" temp", "k, v in aves.items(): aves[k] = v.item() for n_shot in n_shots: key =", "n_shots: key = 'fsa-' + str(n_shot) print(\"epoch {}, {}-shot, val acc {:.4f}\".format(str(1), n_shot,", "x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape, -1) ########## cross-class bias ############", "in n_shots: aves_keys += ['fsa-' + str(n_shot)] aves = {k: util.Averager() for k", "k in aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True)", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int, default=1)", "np.float32).reshape(args.batch_size, 512)) x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1) x_query =", "* temp ret = ops.Argmax()(logits) == labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy())", "mindspore as ms from mindspore import ops, Tensor, context import src.util as util", "OF ANY KIND, either express or implied. 
# See the License for the", "/ args.num_shots) aves_keys = ['tl', 'ta', 'vl', 'va'] for n_shot in n_shots: aves_keys", "query_shape) for i, n_shot in enumerate(n_shots): np.random.seed(0) label_shot = label_list[i] for j in", "= x_shot.shape[-1] bias = x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1) x_query = x_query +", "= [args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys = ['tl', 'ta', 'vl', 'va']", "Tensor, context import src.util as util def cal_acc(args): \"\"\" :return: meta-baseline eval \"\"\"", "def cal_acc(args): \"\"\" :return: meta-baseline eval \"\"\" temp = 5. n_shots = [args.num_shots]", "bias = x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1) x_query = x_query + ops.ExpandDims()(bias, 1)", "reduce import numpy as np import mindspore as ms from mindspore import ops,", "{k: util.Averager() for k in aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list =", "limitations under the License. 
# ============================================================================ \"\"\" postprocess \"\"\" import os import argparse", "as np import mindspore as ms from mindspore import ops, Tensor, context import", "ms from mindspore import ops, Tensor, context import src.util as util def cal_acc(args):", "x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1))", "Co., Ltd # # Licensed under the Apache License, Version 2.0 (the \"License\");", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "x_query = x_query + ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "j in range(file_num): labels = Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\" + str(i) +", "import ops, Tensor, context import src.util as util def cal_acc(args): \"\"\" :return: meta-baseline", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys = ['tl', 'ta', 'vl', 'va'] for n_shot in n_shots:", "2, 1)) logits = logits * temp ret = ops.Argmax()(logits) == labels.astype(ms.int32) acc", "# Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache", "[args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys = ['tl', 'ta', 'vl', 'va'] for", "'va'] for n_shot in n_shots: aves_keys += ['fsa-' + str(n_shot)] aves = {k:", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "language governing permissions and # limitations under the License. 
# ============================================================================ \"\"\" postprocess", "required by applicable law or agreed to in writing, software # distributed under", "\"_\" + str(j) + \"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query =", "applicable law or agreed to in writing, software # distributed under the License", "under the License. # ============================================================================ \"\"\" postprocess \"\"\" import os import argparse from", "os.path.join(args.post_result_path, \"nshot_\" + str(i) + \"_\" + str(j) + \"_0.bin\") x_tot = Tensor(np.fromfile(f,", "or agreed to in writing, software # distributed under the License is distributed", "import argparse from functools import reduce import numpy as np import mindspore as", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "__name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset',", "= x_query_shape[:-3] x_shot_len = reduce(lambda x, y: x*y, shot_shape) x_query_len = reduce(lambda x,", "in enumerate(n_shots): np.random.seed(0) label_shot = label_list[i] for j in range(file_num): labels = Tensor(label_shot[j])", "print(\"epoch {}, {}-shot, val acc {:.4f}\".format(str(1), n_shot, aves[key])) if __name__ == '__main__': parser", "= reduce(lambda x, y: x*y, shot_shape) x_query_len = reduce(lambda x, y: x*y, query_shape)", "in range(file_num): labels = Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\" + str(i) + \"_\"", "License. 
# ============================================================================ \"\"\" postprocess \"\"\" import os import argparse from functools import", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "x_shot_len = reduce(lambda x, y: x*y, shot_shape) x_query_len = reduce(lambda x, y: x*y,", "writing, software # distributed under the License is distributed on an \"AS IS\"", "query_shape = x_query_shape[:-3] x_shot_len = reduce(lambda x, y: x*y, shot_shape) x_query_len = reduce(lambda", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits =", "np.random.seed(0) label_shot = label_list[i] for j in range(file_num): labels = Tensor(label_shot[j]) f =", "License. # You may obtain a copy of the License at # #", "############ bs = x_shot.shape[0] fs = x_shot.shape[-1] bias = x_shot.view(bs, -1, fs).mean(1) -", "label_shot = label_list[i] for j in range(file_num): labels = Tensor(label_shot[j]) f = os.path.join(args.post_result_path,", "x_query = ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits = logits *", "compliance with the License. # You may obtain a copy of the License", "governing permissions and # limitations under the License. 
# ============================================================================ \"\"\" postprocess \"\"\"", "parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int, default=1) args_opt = parser.parse_args() context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target, save_graphs=False) cal_acc(args_opt)", "logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits = logits * temp ret =", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "aves.items(): aves[k] = v.item() for n_shot in n_shots: key = 'fsa-' + str(n_shot)", "bs = x_shot.shape[0] fs = x_shot.shape[-1] bias = x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1)", "\"\"\" import os import argparse from functools import reduce import numpy as np", "aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape =", "n_shot in n_shots: aves_keys += ['fsa-' + str(n_shot)] aves = {k: util.Averager() for", "x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape,", "x_shot_shape = shape_list[0] x_query_shape = shape_list[1] shot_shape = x_shot_shape[:-3] query_shape = x_query_shape[:-3] x_shot_len", "ops.Argmax()(logits) == labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k, v in", "= reduce(lambda x, y: x*y, query_shape) for i, n_shot in enumerate(n_shots): np.random.seed(0) label_shot", "not use this file except in compliance with the License. 
# You may", "str(j) + \"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:]", "labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k, v in aves.items(): aves[k]", "'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int,", "cal_acc(args): \"\"\" :return: meta-baseline eval \"\"\" temp = 5. n_shots = [args.num_shots] file_num", "# ============================================================================ \"\"\" postprocess \"\"\" import os import argparse from functools import reduce", "-1) x_query = x_query.view(*query_shape, -1) ########## cross-class bias ############ bs = x_shot.shape[0] fs", "License, Version 2.0 (the \"License\"); # you may not use this file except", "x, y: x*y, shot_shape) x_query_len = reduce(lambda x, y: x*y, query_shape) for i,", "aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k, v in aves.items(): aves[k] = v.item() for n_shot", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "== labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k, v in aves.items():", "x_query.mean(1) x_query = x_query + ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot)", "# you may not use this file except in compliance with the License.", "\"label.npy\"), allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape = shape_list[1]", "= argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', 
choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path',", "x_shot_shape[:-3] query_shape = x_query_shape[:-3] x_shot_len = reduce(lambda x, y: x*y, shot_shape) x_query_len =", "agreed to in writing, software # distributed under the License is distributed on", "= 'fsa-' + str(n_shot) print(\"epoch {}, {}-shot, val acc {:.4f}\".format(str(1), n_shot, aves[key])) if", "x, y: x*y, query_shape) for i, n_shot in enumerate(n_shots): np.random.seed(0) label_shot = label_list[i]", "(the \"License\"); # you may not use this file except in compliance with", "= x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape, -1) ########## cross-class bias ############ bs =", "x_shot.transpose(0, 2, 1)) logits = logits * temp ret = ops.Argmax()(logits) == labels.astype(ms.int32)", "allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape = shape_list[1] shot_shape", "5. n_shots = [args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys = ['tl', 'ta',", "specific language governing permissions and # limitations under the License. 
# ============================================================================ \"\"\"", "+= ['fsa-' + str(n_shot)] aves = {k: util.Averager() for k in aves_keys} label_list", "+ str(n_shot) print(\"epoch {}, {}-shot, val acc {:.4f}\".format(str(1), n_shot, aves[key])) if __name__ ==", "# Unless required by applicable law or agreed to in writing, software #", "from functools import reduce import numpy as np import mindspore as ms from", "by applicable law or agreed to in writing, software # distributed under the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "n_shots: aves_keys += ['fsa-' + str(n_shot)] aves = {k: util.Averager() for k in", "aves[k] = v.item() for n_shot in n_shots: key = 'fsa-' + str(n_shot) print(\"epoch", "= x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2,", "in n_shots: key = 'fsa-' + str(n_shot) print(\"epoch {}, {}-shot, val acc {:.4f}\".format(str(1),", "1) x_shot = x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query,", "file except in compliance with the License. # You may obtain a copy", "= ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits = logits * temp", "f = os.path.join(args.post_result_path, \"nshot_\" + str(i) + \"_\" + str(j) + \"_0.bin\") x_tot", "License for the specific language governing permissions and # limitations under the License.", "x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape, -1) ##########", "to in writing, software # distributed under the License is distributed on an", "and # limitations under the License. 
# ============================================================================ \"\"\" postprocess \"\"\" import os", "v.item() for n_shot in n_shots: key = 'fsa-' + str(n_shot) print(\"epoch {}, {}-shot,", "implied. # See the License for the specific language governing permissions and #", "\"\"\" :return: meta-baseline eval \"\"\" temp = 5. n_shots = [args.num_shots] file_num =", "shot_shape) x_query_len = reduce(lambda x, y: x*y, query_shape) for i, n_shot in enumerate(n_shots):", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "fs = x_shot.shape[-1] bias = x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1) x_query = x_query", "functools import reduce import numpy as np import mindspore as ms from mindspore", "= v.item() for n_shot in n_shots: key = 'fsa-' + str(n_shot) print(\"epoch {},", "\"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape = shape_list[1] shot_shape = x_shot_shape[:-3] query_shape =", "or implied. # See the License for the specific language governing permissions and", "util.Averager() for k in aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path,", "{}-shot, val acc {:.4f}\".format(str(1), n_shot, aves[key])) if __name__ == '__main__': parser = argparse.ArgumentParser()", "i, n_shot in enumerate(n_shots): np.random.seed(0) label_shot = label_list[i] for j in range(file_num): labels", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "temp ret = ops.Argmax()(logits) == labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "= ['tl', 'ta', 'vl', 'va'] for n_shot in n_shots: aves_keys += ['fsa-' +", "x_query_len = reduce(lambda x, y: x*y, query_shape) for i, n_shot in enumerate(n_shots): np.random.seed(0)", "x_query_shape[:-3] x_shot_len = reduce(lambda x, y: x*y, shot_shape) x_query_len = reduce(lambda x, y:", "+ str(n_shot)].add(acc.asnumpy()) for k, v in aves.items(): aves[k] = v.item() for n_shot in", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape, -1) ########## cross-class bias", "x_query_shape = shape_list[1] shot_shape = x_shot_shape[:-3] query_shape = x_query_shape[:-3] x_shot_len = reduce(lambda x,", "y: x*y, shot_shape) x_query_len = reduce(lambda x, y: x*y, query_shape) for i, n_shot", "\"nshot_\" + str(i) + \"_\" + str(j) + \"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size,", "512)) x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape,", "Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\" + str(i) + \"_\" + str(j) + \"_0.bin\")", "+ ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use 
this file except in compliance with the License. #", "\"\"\" temp = 5. n_shots = [args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys", "{}, {}-shot, val acc {:.4f}\".format(str(1), n_shot, aves[key])) if __name__ == '__main__': parser =", "eval \"\"\" temp = 5. n_shots = [args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots)", "key = 'fsa-' + str(n_shot) print(\"epoch {}, {}-shot, val acc {:.4f}\".format(str(1), n_shot, aves[key]))", "reduce(lambda x, y: x*y, shot_shape) x_query_len = reduce(lambda x, y: x*y, query_shape) for", "ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits = logits * temp ret", "use this file except in compliance with the License. # You may obtain", "for n_shot in n_shots: aves_keys += ['fsa-' + str(n_shot)] aves = {k: util.Averager()", "= x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1) x_query = x_query + ops.ExpandDims()(bias, 1) x_shot", "label_list[i] for j in range(file_num): labels = Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\" +", "= x_shot_shape[:-3] query_shape = x_query_shape[:-3] x_shot_len = reduce(lambda x, y: x*y, shot_shape) x_query_len", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "logits * temp ret = ops.Argmax()(logits) == labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-' +", "ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits = logits * temp ret = ops.Argmax()(logits) ==", "x_query.view(*query_shape, -1) ########## cross-class bias ############ bs = x_shot.shape[0] fs = x_shot.shape[-1] bias", "default='CPU', choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int,", 
"= label_list[i] for j in range(file_num): labels = Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\"", "import numpy as np import mindspore as ms from mindspore import ops, Tensor,", "str(n_shot) print(\"epoch {}, {}-shot, val acc {:.4f}\".format(str(1), n_shot, aves[key])) if __name__ == '__main__':", "2.0 (the \"License\"); # you may not use this file except in compliance", "'vl', 'va'] for n_shot in n_shots: aves_keys += ['fsa-' + str(n_shot)] aves =", "parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result')", "allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape = shape_list[1] shot_shape = x_shot_shape[:-3] query_shape = x_query_shape[:-3]", "for the specific language governing permissions and # limitations under the License. #", "from mindspore import ops, Tensor, context import src.util as util def cal_acc(args): \"\"\"", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "x_query = x_query.view(*query_shape, -1) ########## cross-class bias ############ bs = x_shot.shape[0] fs =", "aves_keys = ['tl', 'ta', 'vl', 'va'] for n_shot in n_shots: aves_keys += ['fsa-'", "file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys = ['tl', 'ta', 'vl', 'va'] for n_shot", "# # Unless required by applicable law or agreed to in writing, software", "argparse from functools import reduce import numpy as np import mindspore as ms", "express or implied. 
# See the License for the specific language governing permissions", "+ str(n_shot)] aves = {k: util.Averager() for k in aves_keys} label_list = np.load(os.path.join(args.pre_result_path,", "+ str(i) + \"_\" + str(j) + \"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512))", "1)) logits = logits * temp ret = ops.Argmax()(logits) == labels.astype(ms.int32) acc =", "Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1) x_query", "str(i) + \"_\" + str(j) + \"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot,", "['tl', 'ta', 'vl', 'va'] for n_shot in n_shots: aves_keys += ['fsa-' + str(n_shot)]", "either express or implied. # See the License for the specific language governing", "= x_query.view(*query_shape, -1) ########## cross-class bias ############ bs = x_shot.shape[0] fs = x_shot.shape[-1]", "x*y, query_shape) for i, n_shot in enumerate(n_shots): np.random.seed(0) label_shot = label_list[i] for j", "x_shot.shape[-1] bias = x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1) x_query = x_query + ops.ExpandDims()(bias,", "labels = Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\" + str(i) + \"_\" + str(j)", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "for i, n_shot in enumerate(n_shots): np.random.seed(0) label_shot = label_list[i] for j in range(file_num):", "v in aves.items(): aves[k] = v.item() for n_shot in n_shots: key = 'fsa-'", "shape_list[1] shot_shape = x_shot_shape[:-3] query_shape = x_query_shape[:-3] x_shot_len = reduce(lambda x, y: x*y,", "'__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') 
parser.add_argument('--post_result_path',", "2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version", "type=str, default='CPU', choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size',", "import src.util as util def cal_acc(args): \"\"\" :return: meta-baseline eval \"\"\" temp =", "x*y, shot_shape) x_query_len = reduce(lambda x, y: x*y, query_shape) for i, n_shot in", "# limitations under the License. # ============================================================================ \"\"\" postprocess \"\"\" import os import", "the License. # You may obtain a copy of the License at #", "str(n_shot)] aves = {k: util.Averager() for k in aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"),", "for n_shot in n_shots: key = 'fsa-' + str(n_shot) print(\"epoch {}, {}-shot, val", "import reduce import numpy as np import mindspore as ms from mindspore import", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "============================================================================ \"\"\" postprocess \"\"\" import os import argparse from functools import reduce import", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "shot_shape = x_shot_shape[:-3] query_shape = x_query_shape[:-3] x_shot_len = reduce(lambda x, y: x*y, shot_shape)", "Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License,", "+ str(j) + \"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query = x_tot[:x_shot_len],", "Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the", "= shape_list[0] x_query_shape = shape_list[1] shot_shape = x_shot_shape[:-3] query_shape = x_query_shape[:-3] 
x_shot_len =", "numpy as np import mindspore as ms from mindspore import ops, Tensor, context", "in aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape", "src.util as util def cal_acc(args): \"\"\" :return: meta-baseline eval \"\"\" temp = 5.", "n_shots = [args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys = ['tl', 'ta', 'vl',", "= {k: util.Averager() for k in aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list", "acc = ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k, v in aves.items(): aves[k] =", "with the License. # You may obtain a copy of the License at", "ops, Tensor, context import src.util as util def cal_acc(args): \"\"\" :return: meta-baseline eval", "default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int, default=1) args_opt = parser.parse_args() context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target, save_graphs=False)", "x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "= x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape, -1) ########## cross-class", "-1, fs).mean(1) - x_query.mean(1) x_query = x_query + ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2)", "temp = 5. n_shots = [args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys =", ":return: meta-baseline eval \"\"\" temp = 5. 
n_shots = [args.num_shots] file_num = int(len(os.listdir(args.post_result_path))", "x_shot = x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0,", "x_shot.shape[0] fs = x_shot.shape[-1] bias = x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1) x_query =", "acc {:.4f}\".format(str(1), n_shot, aves[key])) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str,", "y: x*y, query_shape) for i, n_shot in enumerate(n_shots): np.random.seed(0) label_shot = label_list[i] for", "ret = ops.Argmax()(logits) == labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k,", "aves_keys += ['fsa-' + str(n_shot)] aves = {k: util.Averager() for k in aves_keys}", "law or agreed to in writing, software # distributed under the License is", "for j in range(file_num): labels = Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\" + str(i)", "the License for the specific language governing permissions and # limitations under the", "= np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape = shape_list[1] shot_shape = x_shot_shape[:-3]", "- x_query.mean(1) x_query = x_query + ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2) x_shot =", "x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape, -1) ########## cross-class bias ############ bs = x_shot.shape[0]", "the License. 
# ============================================================================ \"\"\" postprocess \"\"\" import os import argparse from functools", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "import mindspore as ms from mindspore import ops, Tensor, context import src.util as", "bias ############ bs = x_shot.shape[0] fs = x_shot.shape[-1] bias = x_shot.view(bs, -1, fs).mean(1)", "ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits = logits", "mindspore import ops, Tensor, context import src.util as util def cal_acc(args): \"\"\" :return:", "as util def cal_acc(args): \"\"\" :return: meta-baseline eval \"\"\" temp = 5. n_shots", "= logits * temp ret = ops.Argmax()(logits) == labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-'", "val acc {:.4f}\".format(str(1), n_shot, aves[key])) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target',", "logits = logits * temp ret = ops.Argmax()(logits) == labels.astype(ms.int32) acc = ret.astype(ms.float32).mean()", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape = shape_list[1] shot_shape =", "default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int, default=1) args_opt = parser.parse_args()", "= 5. 
n_shots = [args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) / args.num_shots) aves_keys = ['tl',", "x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1) x_query = x_query.view(*query_shape, -1)", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "util def cal_acc(args): \"\"\" :return: meta-baseline eval \"\"\" temp = 5. n_shots =", "in aves.items(): aves[k] = v.item() for n_shot in n_shots: key = 'fsa-' +", "\"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot =", "= ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits = logits * temp ret = ops.Argmax()(logits)", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k, v in aves.items(): aves[k] = v.item() for", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "reduce(lambda x, y: x*y, query_shape) for i, n_shot in enumerate(n_shots): np.random.seed(0) label_shot =", "if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend', 'GPU', 'CPU'])", "cross-class bias ############ bs = x_shot.shape[0] fs = x_shot.shape[-1] bias = x_shot.view(bs, -1,", "'fsa-' + str(n_shot) print(\"epoch {}, {}-shot, val acc {:.4f}\".format(str(1), n_shot, aves[key])) if __name__", "= ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query) logits = ops.BatchMatMul()(x_query, x_shot.transpose(0, 2, 1)) logits =", "meta-baseline eval \"\"\" temp = 5. 
n_shots = [args.num_shots] file_num = int(len(os.listdir(args.post_result_path)) /", "permissions and # limitations under the License. # ============================================================================ \"\"\" postprocess \"\"\" import", "'ta', 'vl', 'va'] for n_shot in n_shots: aves_keys += ['fsa-' + str(n_shot)] aves", "aves = {k: util.Averager() for k in aves_keys} label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True)", "########## cross-class bias ############ bs = x_shot.shape[0] fs = x_shot.shape[-1] bias = x_shot.view(bs,", "str(n_shot)].add(acc.asnumpy()) for k, v in aves.items(): aves[k] = v.item() for n_shot in n_shots:", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. # You may obtain a copy of", "default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int, default=1) args_opt", "= os.path.join(args.post_result_path, \"nshot_\" + str(i) + \"_\" + str(j) + \"_0.bin\") x_tot =", "parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files')", "= x_shot.shape[0] fs = x_shot.shape[-1] bias = x_shot.view(bs, -1, fs).mean(1) - x_query.mean(1) x_query", "n_shot, aves[key])) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend',", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "for k, v in aves.items(): aves[k] = v.item() for n_shot in n_shots: key", "== '__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend', 'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet')", "-1) ########## cross-class bias ############ bs = x_shot.shape[0] fs = x_shot.shape[-1] bias =", "= ops.Argmax()(logits) == labels.astype(ms.int32) acc = ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k, v", "the specific language governing permissions and # limitations under the License. # ============================================================================", "range(file_num): labels = Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\" + str(i) + \"_\" +", "aves[key])) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU', choices=['Ascend', 'GPU',", "+ \"_0.bin\") x_tot = Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot", "'GPU', 'CPU']) parser.add_argument('--dataset', default='mini-imagenet') parser.add_argument('--post_result_path', default='./result_Files') parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots',", "= Tensor(np.fromfile(f, np.float32).reshape(args.batch_size, 512)) x_shot, x_query = x_tot[:x_shot_len], x_tot[-x_query_len:] x_shot = x_shot.view(*shot_shape, -1)", "label_list = np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0]", "= shape_list[1] shot_shape = x_shot_shape[:-3] query_shape = 
x_query_shape[:-3] x_shot_len = reduce(lambda x, y:", "n_shot in n_shots: key = 'fsa-' + str(n_shot) print(\"epoch {}, {}-shot, val acc", "x_query + ops.ExpandDims()(bias, 1) x_shot = x_shot.mean(axis=-2) x_shot = ops.L2Normalize(axis=-1)(x_shot) x_query = ops.L2Normalize(axis=-1)(x_query)", "as ms from mindspore import ops, Tensor, context import src.util as util def", "= Tensor(label_shot[j]) f = os.path.join(args.post_result_path, \"nshot_\" + str(i) + \"_\" + str(j) +", "= ret.astype(ms.float32).mean() aves['fsa-' + str(n_shot)].add(acc.asnumpy()) for k, v in aves.items(): aves[k] = v.item()", "Ltd # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "['fsa-' + str(n_shot)] aves = {k: util.Averager() for k in aves_keys} label_list =", "np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape = shape_list[1] shot_shape = x_shot_shape[:-3] query_shape", "shape_list[0] x_query_shape = shape_list[1] shot_shape = x_shot_shape[:-3] query_shape = x_query_shape[:-3] x_shot_len = reduce(lambda", "postprocess \"\"\" import os import argparse from functools import reduce import numpy as", "n_shot in enumerate(n_shots): np.random.seed(0) label_shot = label_list[i] for j in range(file_num): labels =", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "parser.add_argument('--pre_result_path', type=str, default='./preprocess_Result') parser.add_argument('--batch_size', type=int, default=320) parser.add_argument('--num_shots', type=int, default=1) args_opt = parser.parse_args() context.set_context(mode=context.GRAPH_MODE,", "{:.4f}\".format(str(1), n_shot, aves[key])) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--device_target', type=str, default='CPU',", "import os import argparse from functools import reduce import numpy as np import", "= np.load(os.path.join(args.pre_result_path, \"label.npy\"), allow_pickle=True) 
shape_list = np.load(os.path.join(args.pre_result_path, \"shape.npy\"), allow_pickle=True) x_shot_shape = shape_list[0] x_query_shape" ]
[ "for x in range(5): element=int(input()) arr.append(element) print(\"Sum of possible odd length sub-arrays:\") print(OddLengthSum(arr))", "enter the required array as shown [1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr) for i", "OddLengthSum(arr): sum=0 l=len(arr) for i in range(l): for j in range(i,l,2): for k", "For the output shown in example enter the required array as shown [1,4,2,5,3]", "sum=0 l=len(arr) for i in range(l): for j in range(i,l,2): for k in", "shown in example enter the required array as shown [1,4,2,5,3] def OddLengthSum(arr): sum=0", "return sum print(\"Enter the array of 5 elements: \") arr = [] for", "= [] for x in range(5): element=int(input()) arr.append(element) print(\"Sum of possible odd length", "j in range(i,l,2): for k in range(i,j+1,1): sum+=arr[k] return sum print(\"Enter the array", "output shown in example enter the required array as shown [1,4,2,5,3] def OddLengthSum(arr):", "for j in range(i,l,2): for k in range(i,j+1,1): sum+=arr[k] return sum print(\"Enter the", "i in range(l): for j in range(i,l,2): for k in range(i,j+1,1): sum+=arr[k] return", "def OddLengthSum(arr): sum=0 l=len(arr) for i in range(l): for j in range(i,l,2): for", "sum+=arr[k] return sum print(\"Enter the array of 5 elements: \") arr = []", "[] for x in range(5): element=int(input()) arr.append(element) print(\"Sum of possible odd length sub-arrays:\")", "required array as shown [1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr) for i in range(l):", "as shown [1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr) for i in range(l): for j", "\") arr = [] for x in range(5): element=int(input()) arr.append(element) print(\"Sum of possible", "print(\"Enter the array of 5 elements: \") arr = [] for x in", "array as shown [1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr) for i in range(l): for", "arr = [] for x in range(5): element=int(input()) arr.append(element) print(\"Sum of possible odd", "the array of 5 elements: \") arr = [] for x in range(5):", 
"sum print(\"Enter the array of 5 elements: \") arr = [] for x", "5 elements: \") arr = [] for x in range(5): element=int(input()) arr.append(element) print(\"Sum", "example enter the required array as shown [1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr) for", "the output shown in example enter the required array as shown [1,4,2,5,3] def", "in example enter the required array as shown [1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr)", "range(l): for j in range(i,l,2): for k in range(i,j+1,1): sum+=arr[k] return sum print(\"Enter", "array of 5 elements: \") arr = [] for x in range(5): element=int(input())", "<gh_stars>10-100 # For the output shown in example enter the required array as", "range(i,j+1,1): sum+=arr[k] return sum print(\"Enter the array of 5 elements: \") arr =", "in range(i,l,2): for k in range(i,j+1,1): sum+=arr[k] return sum print(\"Enter the array of", "in range(i,j+1,1): sum+=arr[k] return sum print(\"Enter the array of 5 elements: \") arr", "of 5 elements: \") arr = [] for x in range(5): element=int(input()) arr.append(element)", "# For the output shown in example enter the required array as shown", "k in range(i,j+1,1): sum+=arr[k] return sum print(\"Enter the array of 5 elements: \")", "[1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr) for i in range(l): for j in range(i,l,2):", "for k in range(i,j+1,1): sum+=arr[k] return sum print(\"Enter the array of 5 elements:", "elements: \") arr = [] for x in range(5): element=int(input()) arr.append(element) print(\"Sum of", "shown [1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr) for i in range(l): for j in", "l=len(arr) for i in range(l): for j in range(i,l,2): for k in range(i,j+1,1):", "for i in range(l): for j in range(i,l,2): for k in range(i,j+1,1): sum+=arr[k]", "range(i,l,2): for k in range(i,j+1,1): sum+=arr[k] return sum print(\"Enter the array of 5", "the required array as shown [1,4,2,5,3] def OddLengthSum(arr): sum=0 l=len(arr) for i in", "in range(l): for j in range(i,l,2): 
for k in range(i,j+1,1): sum+=arr[k] return sum" ]
[ "cam.resolution = (512, 512) cam.start_preview() sleep(4) byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10)", "glob from os.path import getsize if __name__ == \"__main__\": tries = 0 while", "PiCameraMMALError from time import sleep from io import StringIO from glob import glob", "import glob from os.path import getsize if __name__ == \"__main__\": tries = 0", "import getsize if __name__ == \"__main__\": tries = 0 while tries < 5:", "# Sometimes happens if something else is hogging the resource sleep(10) continue cam.resolution", "import StringIO from glob import glob from os.path import getsize if __name__ ==", "picamera import PiCamera from picamera.exc import PiCameraMMALError from time import sleep from io", "5: try: cam = PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes happens if something else", "= (512, 512) cam.start_preview() sleep(4) byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording()", "sleep(4) byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording() cam.capture('/home/foo.jpeg') cam.stop_preview() print(\"Recording\") cam.close()", "byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording() cam.capture('/home/foo.jpeg') cam.stop_preview() print(\"Recording\") cam.close() print(glob(\"/home/*\")) print(getsize('home/test.mjpeg')) print(getsize('home/foo.jpeg')) print(byte_buffer.read())", "tries < 5: try: cam = PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes happens if", "from picamera.exc import PiCameraMMALError from time import sleep from io import StringIO from", "hogging the resource sleep(10) continue cam.resolution = (512, 512) cam.start_preview() sleep(4) byte_buffer =", "the resource sleep(10) continue 
cam.resolution = (512, 512) cam.start_preview() sleep(4) byte_buffer = StringIO()", "except PiCameraMMALError: # Sometimes happens if something else is hogging the resource sleep(10)", "from os.path import getsize if __name__ == \"__main__\": tries = 0 while tries", "0 while tries < 5: try: cam = PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes", "import PiCamera from picamera.exc import PiCameraMMALError from time import sleep from io import", "is hogging the resource sleep(10) continue cam.resolution = (512, 512) cam.start_preview() sleep(4) byte_buffer", "continue cam.resolution = (512, 512) cam.start_preview() sleep(4) byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg')", "import sleep from io import StringIO from glob import glob from os.path import", "from picamera import PiCamera from picamera.exc import PiCameraMMALError from time import sleep from", "from glob import glob from os.path import getsize if __name__ == \"__main__\": tries", "if __name__ == \"__main__\": tries = 0 while tries < 5: try: cam", "os.path import getsize if __name__ == \"__main__\": tries = 0 while tries <", "tries = 0 while tries < 5: try: cam = PiCamera(camera_num=0) except PiCameraMMALError:", "while tries < 5: try: cam = PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes happens", "cam = PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes happens if something else is hogging", "StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording() cam.capture('/home/foo.jpeg') cam.stop_preview() print(\"Recording\") cam.close() print(glob(\"/home/*\")) print(getsize('home/test.mjpeg')) print(getsize('home/foo.jpeg'))", "PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes happens if something else is hogging the resource", "getsize if __name__ == \"__main__\": tries = 0 while tries < 5: try:", "something else is hogging the resource 
sleep(10) continue cam.resolution = (512, 512) cam.start_preview()", "else is hogging the resource sleep(10) continue cam.resolution = (512, 512) cam.start_preview() sleep(4)", "512) cam.start_preview() sleep(4) byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording() cam.capture('/home/foo.jpeg') cam.stop_preview()", "import PiCameraMMALError from time import sleep from io import StringIO from glob import", "resource sleep(10) continue cam.resolution = (512, 512) cam.start_preview() sleep(4) byte_buffer = StringIO() byte_buffer.seek(0)", "= PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes happens if something else is hogging the", "glob import glob from os.path import getsize if __name__ == \"__main__\": tries =", "== \"__main__\": tries = 0 while tries < 5: try: cam = PiCamera(camera_num=0)", "< 5: try: cam = PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes happens if something", "= StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording() cam.capture('/home/foo.jpeg') cam.stop_preview() print(\"Recording\") cam.close() print(glob(\"/home/*\")) print(getsize('home/test.mjpeg'))", "from time import sleep from io import StringIO from glob import glob from", "StringIO from glob import glob from os.path import getsize if __name__ == \"__main__\":", "__name__ == \"__main__\": tries = 0 while tries < 5: try: cam =", "byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording() cam.capture('/home/foo.jpeg') cam.stop_preview() print(\"Recording\") cam.close() print(glob(\"/home/*\"))", "cam.start_preview() sleep(4) byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording() cam.capture('/home/foo.jpeg') cam.stop_preview() 
print(\"Recording\")", "= 0 while tries < 5: try: cam = PiCamera(camera_num=0) except PiCameraMMALError: #", "try: cam = PiCamera(camera_num=0) except PiCameraMMALError: # Sometimes happens if something else is", "if something else is hogging the resource sleep(10) continue cam.resolution = (512, 512)", "io import StringIO from glob import glob from os.path import getsize if __name__", "PiCameraMMALError: # Sometimes happens if something else is hogging the resource sleep(10) continue", "from io import StringIO from glob import glob from os.path import getsize if", "sleep from io import StringIO from glob import glob from os.path import getsize", "\"__main__\": tries = 0 while tries < 5: try: cam = PiCamera(camera_num=0) except", "happens if something else is hogging the resource sleep(10) continue cam.resolution = (512,", "sleep(10) continue cam.resolution = (512, 512) cam.start_preview() sleep(4) byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg',", "(512, 512) cam.start_preview() sleep(4) byte_buffer = StringIO() byte_buffer.seek(0) cam.start_recording('/home/test.mjpeg', format='mjpeg') cam.wait_recording(10) cam.stop_recording() cam.capture('/home/foo.jpeg')", "picamera.exc import PiCameraMMALError from time import sleep from io import StringIO from glob", "time import sleep from io import StringIO from glob import glob from os.path", "Sometimes happens if something else is hogging the resource sleep(10) continue cam.resolution =", "PiCamera from picamera.exc import PiCameraMMALError from time import sleep from io import StringIO" ]
[ "import torch.nn as nn import torch.nn.functional as F import numpy as np from", "as np from .encoder import BiLstmEncoder from .classifier import AttClassifier from torch.autograd import", "from torch.autograd import Variable from torch.nn import functional, init class MGLattice_model(nn.Module): def __init__(self,", "as F import numpy as np from .encoder import BiLstmEncoder from .classifier import", ".classifier import AttClassifier from torch.autograd import Variable from torch.nn import functional, init class", "char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope): # ins_num * seq_len * hidden_dim", "torch.nn.functional as F import numpy as np from .encoder import BiLstmEncoder from .classifier", "char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) # batch_size * num_classes logit = self.classifier.get_logit(hidden_out, ins_label, scope)", "* seq_len * hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover,", "pos1_inputs, pos2_inputs) # batch_size * num_classes logit = self.classifier.get_logit(hidden_out, ins_label, scope) return logit", "torch.autograd as autograd import torch.nn as nn import torch.nn.functional as F import numpy", "= BiLstmEncoder(data) # Attentive classifier self.classifier = AttClassifier(data) def forward(self, gaz_list, word_inputs, biword_inputs,", "torch.autograd import Variable from torch.nn import functional, init class MGLattice_model(nn.Module): def __init__(self, data):", "biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope): # ins_num * seq_len", "biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) # batch_size * num_classes logit =", "Attentive classifier self.classifier = AttClassifier(data) def forward(self, gaz_list, word_inputs, biword_inputs, 
word_seq_lengths, char_inputs, char_seq_lengths,", "as nn import torch.nn.functional as F import numpy as np from .encoder import", "word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope): # ins_num *", "scope): # ins_num * seq_len * hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths,", "classifier self.classifier = AttClassifier(data) def forward(self, gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover,", "data): super(MGLattice_model, self).__init__() # MG-Lattice encoder self.encoder = BiLstmEncoder(data) # Attentive classifier self.classifier", "self).__init__() # MG-Lattice encoder self.encoder = BiLstmEncoder(data) # Attentive classifier self.classifier = AttClassifier(data)", "word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) # batch_size * num_classes logit = self.classifier.get_logit(hidden_out,", "torch.nn import functional, init class MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model, self).__init__() # MG-Lattice", "MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model, self).__init__() # MG-Lattice encoder self.encoder = BiLstmEncoder(data) #", "seq_len * hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs,", "import numpy as np from .encoder import BiLstmEncoder from .classifier import AttClassifier from", "pos1_inputs, pos2_inputs, ins_label, scope): # ins_num * seq_len * hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list,", "import AttClassifier from torch.autograd import Variable from torch.nn import functional, init class MGLattice_model(nn.Module):", "super(MGLattice_model, self).__init__() # MG-Lattice encoder self.encoder = 
BiLstmEncoder(data) # Attentive classifier self.classifier =", "char_seq_recover, pos1_inputs, pos2_inputs) # batch_size * num_classes logit = self.classifier.get_logit(hidden_out, ins_label, scope) return", "encoder self.encoder = BiLstmEncoder(data) # Attentive classifier self.classifier = AttClassifier(data) def forward(self, gaz_list,", "import torch.nn.functional as F import numpy as np from .encoder import BiLstmEncoder from", "def forward(self, gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope):", "forward(self, gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope): #", "AttClassifier(data) def forward(self, gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label,", "# MG-Lattice encoder self.encoder = BiLstmEncoder(data) # Attentive classifier self.classifier = AttClassifier(data) def", "F import numpy as np from .encoder import BiLstmEncoder from .classifier import AttClassifier", "class MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model, self).__init__() # MG-Lattice encoder self.encoder = BiLstmEncoder(data)", "= self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) # batch_size *", "char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) # batch_size * num_classes logit = self.classifier.get_logit(hidden_out, ins_label,", "pos2_inputs, ins_label, scope): # ins_num * seq_len * hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs,", "self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) # batch_size * 
num_classes", "MG-Lattice encoder self.encoder = BiLstmEncoder(data) # Attentive classifier self.classifier = AttClassifier(data) def forward(self,", "as autograd import torch.nn as nn import torch.nn.functional as F import numpy as", "gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope): # ins_num", "# ins_num * seq_len * hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs,", "* hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs)", "np from .encoder import BiLstmEncoder from .classifier import AttClassifier from torch.autograd import Variable", "def __init__(self, data): super(MGLattice_model, self).__init__() # MG-Lattice encoder self.encoder = BiLstmEncoder(data) # Attentive", "numpy as np from .encoder import BiLstmEncoder from .classifier import AttClassifier from torch.autograd", "Variable from torch.nn import functional, init class MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model, self).__init__()", "# Attentive classifier self.classifier = AttClassifier(data) def forward(self, gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs,", "init class MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model, self).__init__() # MG-Lattice encoder self.encoder =", "import torch import torch.autograd as autograd import torch.nn as nn import torch.nn.functional as", "char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope): # ins_num * seq_len * hidden_dim hidden_out", "import BiLstmEncoder from .classifier import AttClassifier from torch.autograd import Variable from torch.nn import", "ins_num * seq_len * hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, 
biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths,", "functional, init class MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model, self).__init__() # MG-Lattice encoder self.encoder", "self.classifier = AttClassifier(data) def forward(self, gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs,", "word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope): # ins_num * seq_len *", "torch import torch.autograd as autograd import torch.nn as nn import torch.nn.functional as F", "AttClassifier from torch.autograd import Variable from torch.nn import functional, init class MGLattice_model(nn.Module): def", ".encoder import BiLstmEncoder from .classifier import AttClassifier from torch.autograd import Variable from torch.nn", "from torch.nn import functional, init class MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model, self).__init__() #", "__init__(self, data): super(MGLattice_model, self).__init__() # MG-Lattice encoder self.encoder = BiLstmEncoder(data) # Attentive classifier", "BiLstmEncoder(data) # Attentive classifier self.classifier = AttClassifier(data) def forward(self, gaz_list, word_inputs, biword_inputs, word_seq_lengths,", "import torch.autograd as autograd import torch.nn as nn import torch.nn.functional as F import", "import Variable from torch.nn import functional, init class MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model,", "hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) #", "= AttClassifier(data) def forward(self, gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs,", "char_seq_recover, pos1_inputs, pos2_inputs, ins_label, scope): # 
ins_num * seq_len * hidden_dim hidden_out =", "word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) # batch_size * num_classes logit", "hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs, word_seq_lengths, char_inputs, char_seq_lengths, char_seq_recover, pos1_inputs, pos2_inputs) # batch_size", "BiLstmEncoder from .classifier import AttClassifier from torch.autograd import Variable from torch.nn import functional,", "torch.nn as nn import torch.nn.functional as F import numpy as np from .encoder", "from .classifier import AttClassifier from torch.autograd import Variable from torch.nn import functional, init", "self.encoder = BiLstmEncoder(data) # Attentive classifier self.classifier = AttClassifier(data) def forward(self, gaz_list, word_inputs,", "autograd import torch.nn as nn import torch.nn.functional as F import numpy as np", "from .encoder import BiLstmEncoder from .classifier import AttClassifier from torch.autograd import Variable from", "import functional, init class MGLattice_model(nn.Module): def __init__(self, data): super(MGLattice_model, self).__init__() # MG-Lattice encoder", "nn import torch.nn.functional as F import numpy as np from .encoder import BiLstmEncoder", "ins_label, scope): # ins_num * seq_len * hidden_dim hidden_out = self.encoder.get_seq_features(gaz_list, word_inputs, biword_inputs," ]
[ "generated_files: dict, arguments: dict, **kwargs) -> dict: ''' Given output path dictionary of", "#!/usr/bin/env python3 ''' Plugin interface definition ''' import abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def", "PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) -> str: ''' Return plugin identifier ''' @abc.abstractmethod def", "@abc.abstractmethod def process(self, generated_files: dict, arguments: dict, **kwargs) -> dict: ''' Given output", "''' Plugin interface definition ''' import abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) ->", "process(self, generated_files: dict, arguments: dict, **kwargs) -> dict: ''' Given output path dictionary", "''' import abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) -> str: ''' Return plugin", "generated_files ( mapping filename to FileRep) and dictionary of arguments, Return {filename: FileRep,...}", "dict, arguments: dict, **kwargs) -> dict: ''' Given output path dictionary of generated_files", "Plugin interface definition ''' import abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) -> str:", "path dictionary of generated_files ( mapping filename to FileRep) and dictionary of arguments,", "''' Given output path dictionary of generated_files ( mapping filename to FileRep) and", "identifier ''' @abc.abstractmethod def process(self, generated_files: dict, arguments: dict, **kwargs) -> dict: '''", "class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) -> str: ''' Return plugin identifier ''' @abc.abstractmethod", "import abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) -> str: ''' Return plugin identifier", "interface definition ''' import abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) -> str: '''", "output path dictionary of generated_files ( mapping filename to FileRep) 
and dictionary of", "of generated_files ( mapping filename to FileRep) and dictionary of arguments, Return {filename:", "def plugin_id(self) -> str: ''' Return plugin identifier ''' @abc.abstractmethod def process(self, generated_files:", "Return plugin identifier ''' @abc.abstractmethod def process(self, generated_files: dict, arguments: dict, **kwargs) ->", "''' Return plugin identifier ''' @abc.abstractmethod def process(self, generated_files: dict, arguments: dict, **kwargs)", "arguments: dict, **kwargs) -> dict: ''' Given output path dictionary of generated_files (", "( mapping filename to FileRep) and dictionary of arguments, Return {filename: FileRep,...} '''", "dict: ''' Given output path dictionary of generated_files ( mapping filename to FileRep)", "python3 ''' Plugin interface definition ''' import abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self)", "-> dict: ''' Given output path dictionary of generated_files ( mapping filename to", "Given output path dictionary of generated_files ( mapping filename to FileRep) and dictionary", "-> str: ''' Return plugin identifier ''' @abc.abstractmethod def process(self, generated_files: dict, arguments:", "def process(self, generated_files: dict, arguments: dict, **kwargs) -> dict: ''' Given output path", "str: ''' Return plugin identifier ''' @abc.abstractmethod def process(self, generated_files: dict, arguments: dict,", "''' @abc.abstractmethod def process(self, generated_files: dict, arguments: dict, **kwargs) -> dict: ''' Given", "abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) -> str: ''' Return plugin identifier '''", "dictionary of generated_files ( mapping filename to FileRep) and dictionary of arguments, Return", "dict, **kwargs) -> dict: ''' Given output path dictionary of generated_files ( mapping", "plugin_id(self) -> str: ''' Return plugin identifier ''' @abc.abstractmethod def process(self, generated_files: dict,", "**kwargs) -> 
dict: ''' Given output path dictionary of generated_files ( mapping filename", "@abc.abstractmethod def plugin_id(self) -> str: ''' Return plugin identifier ''' @abc.abstractmethod def process(self,", "plugin identifier ''' @abc.abstractmethod def process(self, generated_files: dict, arguments: dict, **kwargs) -> dict:", "definition ''' import abc class PluginBase(metaclass=abc.ABCMeta): @abc.abstractmethod def plugin_id(self) -> str: ''' Return" ]
[ "on (this will be the 'parent') parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title =", "self.filetypes.extend(self.geofiletypes) #Better for searching later self.filetypes = tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text',", "print response.headers return [] #Otherwise, its html and we process all links on", "in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:]", "and we are dealing with DKAN, then we are probably dealing with an", "it isn't a repetition, use it if (len(directText) > 0) and (directText !=", "Website() item['URL_Datei'] = unicode('', 'utf-8') url_file = site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] = url_file[0]", "we just have e.g. \"json\" and we are dealing with DKAN, then we", "or ('Content-Disposition' in response.headers and ext in response.headers['Content-Disposition'].upper())): print \"Detected a downloadable, generated", "response): for ext in self.filetypes: if (ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers", "= response.url #Get all links sites = sel.xpath('//body//a') #items = [] for site", "we are probably dealing with an API item description and not a file", "self.fileoutdata = domain + \".data.csv\" self.allowed_domains = [self.domain] self.start_urls = [ \"http://www.\" +", "self.allowed_domains = [self.domain] self.start_urls = [ \"http://www.\" + domain + \"/\", ] #File", "ext[1:len(ext)] #And is it one of our special geo filetypes? if ext in", "\"Searching \" + domain + \"...\" def parse_page(self, response): for ext in self.filetypes:", "types to search for (non-geo); list so that we can extend self.filetypes =", "there's something there and it isn't a repetition, use it if (len(directText) >", "= ['.' 
+ ft for ft in metautils.fileformats] #Geographic file types self.geofiletypes =", "nothing, then look at the title and alt elements title_text = site.xpath('@title').extract() if", "+ \".data.csv\" self.allowed_domains = [self.domain] self.start_urls = [ \"http://www.\" + domain + \"/\",", "unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if we just have e.g. \"json\" and we are", "item['URL_Datei'] = title_text[0] alt_text = site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text'] == u''): item['URL_Datei']", "= url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get ALL text of everything inside the", "links on the page sel = Selector(response) #Title of the page we are", "'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata", "remove the '.' for ext in self.filetypes: if ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format']", "ALL text of everything inside the link #First any sub-elements like <span> textbits", "kann aber nachtraeglich ermittelt werden' item['Title_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt", "elements title_text = site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = title_text[0]", "#Is it a file (does it have any of the extensions (including the", "self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields)", "nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done return [] if ('Content-Type' in response.headers and 'text/html'", "def parse_page(self, response): for ext in self.filetypes: if (ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition'", 
"+ \".csv\" self.fileoutdata = domain + \".data.csv\" self.allowed_domains = [self.domain] self.start_urls = [", "everything inside the link #First any sub-elements like <span> textbits = site.xpath('child::node()') item['URL_Text']", "[self.domain] self.start_urls = [ \"http://www.\" + domain + \"/\", ] #File types to", "= ext[1:] #if we just have e.g. \"json\" and we are dealing with", "self.filetypes: if ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And is it one", "[ \"http://www.\" + domain + \"/\", ] #File types to search for (non-geo);", ") def __init__(self, domain=None, *a, **kw): super(DataSpider, self).__init__(*a, **kw) self.domain = domain self.fileoutall", "name = \"data\" rules = ( # Extract all links and parse them", "unicode(self.domain, 'utf-8') #Get ALL text of everything inside the link #First any sub-elements", "generated file\" item = Website() item['URL_Datei'] = response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not", "= parent_url item['Title_PARENT'] = parent_title #Is it a file (does it have any", "response.headers['Content-Disposition'].upper())): print \"Detected a downloadable, generated file\" item = Website() item['URL_Datei'] = response.url", "scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector from", "= site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] =", "links sites = sel.xpath('//body//a') #items = [] for site in sites: item =", "scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector from dirbot.items import Website class DataSpider(CrawlSpider):", "(ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers and ext in response.headers['Content-Disposition'].upper())): print \"Detected", "of the 
page we are on (parent) parent_url = response.url #Get all links", "ft for ft in metautils.fileformats] #Geographic file types self.geofiletypes = ['.' + ft", "types self.geofiletypes = ['.' + ft for ft in metautils.geoformats] #Combined list to", "ft for ft in metautils.geoformats] #Combined list to search for at first self.filetypes.extend(self.geofiletypes)", "0): item['URL_Text'] += thetext[0] #Then the actual text directText = site.xpath('text()').extract() #If there's", "(item['URL_Dateiname'].upper() == item['Format']) and 'node' in item['URL_Datei']: return [] if (ext in self.geofiletypes):", "link #First any sub-elements like <span> textbits = site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8')", "'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields)", "for ext in self.filetypes: if (ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers and", "u'Not interesting' item['geo'] = u'' item['URL_PARENT'] = parent_url item['Title_PARENT'] = parent_title #Is it", "url_file = site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get", "== u''): item['URL_Datei'] = title_text[0] alt_text = site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text'] ==", "on the page sel = Selector(response) #Title of the page we are on", "item description and not a file if (item['URL_Dateiname'].upper() == item['Format']) and 'node' in", "spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self, domain=None, *a, **kw): super(DataSpider, self).__init__(*a, **kw)", "a downloadable, generated file\" item = Website() item['URL_Datei'] = response.url item['Stadt_URL'] = unicode(self.domain,", "directText[0] item['URL_Text'] = 
item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If that got us nothing, then", "\" + domain + \"...\" def parse_page(self, response): for ext in self.filetypes: if", "item['URL_Datei']: return [] if (ext in self.geofiletypes): item['geo'] = 'x' else: item['geo'] =", "a file if (item['URL_Dateiname'].upper() == item['Format']) and 'node' in item['URL_Datei']: return [] if", "'utf-8') url_file = site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8')", "item['URL_Datei'] = unicode('', 'utf-8') url_file = site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL']", "(len(thetext) > 0): item['URL_Text'] += thetext[0] #Then the actual text directText = site.xpath('text()').extract()", "for at first self.filetypes.extend(self.geofiletypes) #Better for searching later self.filetypes = tuple(self.filetypes) self.fields =", "ermittelt werden' item['Title_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done", "there and it isn't a repetition, use it if (len(directText) > 0) and", "(ext in self.geofiletypes): item['geo'] = 'x' else: item['geo'] = u'' item['URL_PARENT'] = u'Nicht", "'node' in item['URL_Datei']: return [] if (ext in self.geofiletypes): item['geo'] = 'x' else:", "and (item['URL_Text'] == u''): item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not", "have e.g. 
\"json\" and we are dealing with DKAN, then we are probably", "interesting' item['geo'] = u'' item['URL_PARENT'] = parent_url item['Title_PARENT'] = parent_title #Is it a", "to search for (non-geo); list so that we can extend self.filetypes = ['.'", "any sub-elements like <span> textbits = site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8') for text", "are dealing with DKAN, then we are probably dealing with an API item", "in response.headers['Content-Type']): print \"Not HTML or anything else of interest, giving up\" print", "= site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get ALL", "#Get ALL text of everything inside the link #First any sub-elements like <span>", "item = Website() item['URL_Datei'] = unicode('', 'utf-8') url_file = site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei']", "thetext): item['URL_Text'] += directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If that got", "then look at the title and alt elements title_text = site.xpath('@title').extract() if (len(title_text)>0)", "'geo', 'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata,", "text.xpath('text()').extract() if (len(thetext) > 0): item['URL_Text'] += thetext[0] #Then the actual text directText", "#then remove the '.' 
for ext in self.filetypes: if ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper():", "'x' else: item['geo'] = u'' item['URL_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt", "] #File types to search for (non-geo); list so that we can extend", "parent_url = response.url #Get all links sites = sel.xpath('//body//a') #items = [] for", "0) and (directText != thetext): item['URL_Text'] += directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\",", "from scrapy.selector import Selector from dirbot.items import Website class DataSpider(CrawlSpider): name = \"data\"", "repetition, use it if (len(directText) > 0) and (directText != thetext): item['URL_Text'] +=", "\".data.csv\" self.allowed_domains = [self.domain] self.start_urls = [ \"http://www.\" + domain + \"/\", ]", "\"/\", ] #File types to search for (non-geo); list so that we can", "for searching later self.filetypes = tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format',", "= ( # Extract all links and parse them with the spider's method", "are on (parent) parent_url = response.url #Get all links sites = sel.xpath('//body//a') #items", "ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And is it one of our", "downloadable, generated file\" item = Website() item['URL_Datei'] = response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8')", "search for at first self.filetypes.extend(self.geofiletypes) #Better for searching later self.filetypes = tuple(self.filetypes) self.fields", "title_text[0] alt_text = site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = alt_text[0]", "filetypes? 
if ext in self.geofiletypes: item['geo'] = 'x' self.writerdata.writerow(item) self.writer.writerow(item) #items.append(item) return []", "unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print", "= u'Nicht moeglich kann aber nachtraeglich ermittelt werden' item['Title_PARENT'] = u'Nicht moeglich kann", "if (len(parent_title)>0): parent_title = parent_title[0] #URL of the page we are on (parent)", "+ ft for ft in metautils.fileformats] #Geographic file types self.geofiletypes = ['.' +", "= domain self.fileoutall = domain + \".csv\" self.fileoutdata = domain + \".data.csv\" self.allowed_domains", "sel = Selector(response) #Title of the page we are on (this will be", "parse_page(self, response): for ext in self.filetypes: if (ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition' in", "= text.xpath('text()').extract() if (len(thetext) > 0): item['URL_Text'] += thetext[0] #Then the actual text", "first self.filetypes.extend(self.geofiletypes) #Better for searching later self.filetypes = tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei',", "response.url #Get all links sites = sel.xpath('//body//a') #items = [] for site in", "\"Detected a downloadable, generated file\" item = Website() item['URL_Datei'] = response.url item['Stadt_URL'] =", "self.fields) self.writerdata.writeheader() print \"Searching \" + domain + \"...\" def parse_page(self, response): for", "item['URL_Text'] += thetext[0] #Then the actual text directText = site.xpath('text()').extract() #If there's something", "in textbits: thetext = text.xpath('text()').extract() if (len(thetext) > 0): item['URL_Text'] += thetext[0] #Then", "are on (this will be the 'parent') parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title", "a file (does it have any 
of the extensions (including the '.' in", "item['Format']) and 'node' in item['URL_Datei']: return [] if (ext in self.geofiletypes): item['geo'] =", "#Title of the page we are on (this will be the 'parent') parent_title", "#Geographic file types self.geofiletypes = ['.' + ft for ft in metautils.geoformats] #Combined", "title and alt elements title_text = site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text'] == u''):", "item['Format'] = ext[1:] #if we just have e.g. \"json\" and we are dealing", "= [] for site in sites: item = Website() item['URL_Datei'] = unicode('', 'utf-8')", "domain + \"/\", ] #File types to search for (non-geo); list so that", "domain + \"...\" def parse_page(self, response): for ext in self.filetypes: if (ext[1:] in", "self.start_urls = [ \"http://www.\" + domain + \"/\", ] #File types to search", "response.headers and 'text/html' not in response.headers['Content-Type']): print \"Not HTML or anything else of", "in self.geofiletypes): item['geo'] = 'x' else: item['geo'] = u'' item['URL_PARENT'] = u'Nicht moeglich", "file if (item['URL_Dateiname'].upper() == item['Format']) and 'node' in item['URL_Datei']: return [] if (ext", "'.' in the filename, #then remove the '.' for ext in self.filetypes: if", "= ext[1:len(ext)] #And is it one of our special geo filetypes? if ext", "errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And is it one of our special geo filetypes?", "item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting' item['geo'] = u''", "site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = title_text[0] alt_text = site.xpath('@alt').extract()", "ft in metautils.fileformats] #Geographic file types self.geofiletypes = ['.' 
+ ft for ft", "#URL of the page we are on (parent) parent_url = response.url #Get all", "= u'Nicht moeglich kann aber nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done return [] if", "= item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If that got us nothing, then look at", "in the filename, #then remove the '.' for ext in self.filetypes: if ext", "from dirbot.items import Website class DataSpider(CrawlSpider): name = \"data\" rules = ( #", "if (len(thetext) > 0): item['URL_Text'] += thetext[0] #Then the actual text directText =", "title_text = site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = title_text[0] alt_text", "directText = site.xpath('text()').extract() #If there's something there and it isn't a repetition, use", "is it one of our special geo filetypes? if ext in self.geofiletypes: item['geo']", "= u'' item['URL_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden' item['Title_PARENT'] =", "(including the '.' in the filename, #then remove the '.' 
for ext in", "self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall,", "(len(parent_title)>0): parent_title = parent_title[0] #URL of the page we are on (parent) parent_url", "response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers and ext in response.headers['Content-Disposition'].upper())): print \"Detected a downloadable,", "API item description and not a file if (item['URL_Dateiname'].upper() == item['Format']) and 'node'", "sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title = parent_title[0] #URL of the page we are on", "we are on (parent) parent_url = response.url #Get all links sites = sel.xpath('//body//a')", "Website() item['URL_Datei'] = response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not applicable item['URL_Text'] = unicode('',", "= unicode('', 'utf-8') if ('Content-Disposition' in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname']", "isn't a repetition, use it if (len(directText) > 0) and (directText != thetext):", "dealing with an API item description and not a file if (item['URL_Dateiname'].upper() ==", "import SgmlLinkExtractor from scrapy.selector import Selector from dirbot.items import Website class DataSpider(CrawlSpider): name", "sub-elements like <span> textbits = site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8') for text in", "kann aber nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done return [] if ('Content-Type' in response.headers", "(non-geo); list so that we can extend self.filetypes = ['.' 
+ ft for", "at first self.filetypes.extend(self.geofiletypes) #Better for searching later self.filetypes = tuple(self.filetypes) self.fields = ('Stadt_URL',", "of everything inside the link #First any sub-elements like <span> textbits = site.xpath('child::node()')", "else of interest, giving up\" print response.headers return [] #Otherwise, its html and", "self.writerdata.writeheader() print \"Searching \" + domain + \"...\" def parse_page(self, response): for ext", "= Selector(response) #Title of the page we are on (this will be the", "('Content-Disposition' in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] =", "of the page we are on (this will be the 'parent') parent_title =", "print \"Detected a downloadable, generated file\" item = Website() item['URL_Datei'] = response.url item['Stadt_URL']", "'utf-8') if ('Content-Disposition' in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1]", "not a file if (item['URL_Dateiname'].upper() == item['Format']) and 'node' in item['URL_Datei']: return []", "if ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And is it one of", "process all links on the page sel = Selector(response) #Title of the page", "#First any sub-elements like <span> textbits = site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8') for", "and ext in response.headers['Content-Disposition'].upper())): print \"Detected a downloadable, generated file\" item = Website()", "= [self.domain] self.start_urls = [ \"http://www.\" + domain + \"/\", ] #File types", "<span> textbits = site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8') for text in textbits: thetext", "if (len(directText) > 0) and (directText != thetext): 
item['URL_Text'] += directText[0] item['URL_Text'] =", "#Done return [] if ('Content-Type' in response.headers and 'text/html' not in response.headers['Content-Type']): print", "unicode(self.domain, 'utf-8') #Not applicable item['URL_Text'] = unicode('', 'utf-8') if ('Content-Disposition' in response.headers): item['URL_Dateiname']", "['.' + ft for ft in metautils.fileformats] #Geographic file types self.geofiletypes = ['.'", "'utf-8') #Get ALL text of everything inside the link #First any sub-elements like", "'parent') parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title = parent_title[0] #URL of the page", "we are dealing with DKAN, then we are probably dealing with an API", "site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8') for text in textbits: thetext = text.xpath('text()').extract() if", "text directText = site.xpath('text()').extract() #If there's something there and it isn't a repetition,", "ext in response.headers['Content-Disposition'].upper())): print \"Detected a downloadable, generated file\" item = Website() item['URL_Datei']", "file\" item = Website() item['URL_Datei'] = response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not applicable", "with the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self, domain=None, *a, **kw): super(DataSpider,", "an API item description and not a file if (item['URL_Dateiname'].upper() == item['Format']) and", "[] if ('Content-Type' in response.headers and 'text/html' not in response.headers['Content-Type']): print \"Not HTML", "on (parent) parent_url = response.url #Get all links sites = sel.xpath('//body//a') #items =", "\"Not HTML or anything else of interest, giving up\" print response.headers return []", "alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting' item['geo'] = u'' item['URL_PARENT'] =", "us nothing, then look at the title 
and alt elements title_text = site.xpath('@title').extract()", "\").replace(\"\\n\", \"\").strip() #If that got us nothing, then look at the title and", "import metautils from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector", "one of our special geo filetypes? if ext in self.geofiletypes: item['geo'] = 'x'", "self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print \"Searching \" + domain", "item['URL_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden' item['Title_PARENT'] = u'Nicht moeglich", "import Website class DataSpider(CrawlSpider): name = \"data\" rules = ( # Extract all", "ext[1:] #if we just have e.g. \"json\" and we are dealing with DKAN,", "+= directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If that got us nothing,", "list so that we can extend self.filetypes = ['.' + ft for ft", "print \"Not HTML or anything else of interest, giving up\" print response.headers return", "item['Title_PARENT'] = parent_title #Is it a file (does it have any of the", "geo filetypes? if ext in self.geofiletypes: item['geo'] = 'x' self.writerdata.writerow(item) self.writer.writerow(item) #items.append(item) return", "item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If that got us nothing, then look at the", "alt elements title_text = site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] =", "item = Website() item['URL_Datei'] = response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not applicable item['URL_Text']", "if ('Content-Type' in response.headers and 'text/html' not in response.headers['Content-Type']): print \"Not HTML or", "the '.' 
for ext in self.filetypes: if ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] =", "page we are on (parent) parent_url = response.url #Get all links sites =", "'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if we just have e.g.", "response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not applicable item['URL_Text'] = unicode('', 'utf-8') if ('Content-Disposition'", "nachtraeglich ermittelt werden' item['Title_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden' self.writerdata.writerow(item)", "= ['.' + ft for ft in metautils.geoformats] #Combined list to search for", "in response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers and ext in response.headers['Content-Disposition'].upper())): print \"Detected a", "of the extensions (including the '.' in the filename, #then remove the '.'", "probably dealing with an API item description and not a file if (item['URL_Dateiname'].upper()", "all links and parse them with the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def", "tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer =", "sel.xpath('//body//a') #items = [] for site in sites: item = Website() item['URL_Datei'] =", "we are on (this will be the 'parent') parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0):", "print \"Searching \" + domain + \"...\" def parse_page(self, response): for ext in", "description and not a file if (item['URL_Dateiname'].upper() == item['Format']) and 'node' in item['URL_Datei']:", "'utf-8') for text in textbits: thetext = text.xpath('text()').extract() if (len(thetext) > 0): item['URL_Text']", "and parse them with the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def 
__init__(self, domain=None,", "__init__(self, domain=None, *a, **kw): super(DataSpider, self).__init__(*a, **kw) self.domain = domain self.fileoutall = domain", "search for (non-geo); list so that we can extend self.filetypes = ['.' +", "= site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = title_text[0] alt_text =", "#File types to search for (non-geo); list so that we can extend self.filetypes", "interest, giving up\" print response.headers return [] #Otherwise, its html and we process", "(len(title_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = title_text[0] alt_text = site.xpath('@alt').extract() if (len(alt_text)>0)", "up\" print response.headers return [] #Otherwise, its html and we process all links", "look at the title and alt elements title_text = site.xpath('@title').extract() if (len(title_text)>0) and", "'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader()", "unicodecsv import metautils from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from", "in response.headers and ext in response.headers['Content-Disposition'].upper())): print \"Detected a downloadable, generated file\" item", "item['geo'] = u'' item['URL_PARENT'] = parent_url item['Title_PARENT'] = parent_title #Is it a file", "> 0) and (directText != thetext): item['URL_Text'] += directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \"", "(len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get ALL text of everything", "later self.filetypes = tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT',", "it if (len(directText) > 0) and (directText != thetext): item['URL_Text'] += directText[0] 
item['URL_Text']", "parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title = parent_title[0] #URL of the page we", "domain + \".data.csv\" self.allowed_domains = [self.domain] self.start_urls = [ \"http://www.\" + domain +", "the filename, #then remove the '.' for ext in self.filetypes: if ext in", "file types self.geofiletypes = ['.' + ft for ft in metautils.geoformats] #Combined list", "list to search for at first self.filetypes.extend(self.geofiletypes) #Better for searching later self.filetypes =", "item['URL_PARENT'] = parent_url item['Title_PARENT'] = parent_title #Is it a file (does it have", "['.' + ft for ft in metautils.geoformats] #Combined list to search for at", "= site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8') for text in textbits: thetext = text.xpath('text()').extract()", "item['URL_Datei'] = url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get ALL text of everything inside", "aber nachtraeglich ermittelt werden' item['Title_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden'", "that we can extend self.filetypes = ['.' 
+ ft for ft in metautils.fileformats]", "item['URL_Datei'] = response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not applicable item['URL_Text'] = unicode('', 'utf-8')", "+ ft for ft in metautils.geoformats] #Combined list to search for at first", "'utf-8') #Not applicable item['URL_Text'] = unicode('', 'utf-8') if ('Content-Disposition' in response.headers): item['URL_Dateiname'] =", "#Get all links sites = sel.xpath('//body//a') #items = [] for site in sites:", "from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector from dirbot.items import Website class", "\"data\" rules = ( # Extract all links and parse them with the", "for site in sites: item = Website() item['URL_Datei'] = unicode('', 'utf-8') url_file =", "and alt elements title_text = site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text'] == u''): item['URL_Datei']", "links and parse them with the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self,", "**kw): super(DataSpider, self).__init__(*a, **kw) self.domain = domain self.fileoutall = domain + \".csv\" self.fileoutdata", "[] #Otherwise, its html and we process all links on the page sel", "our special geo filetypes? 
if ext in self.geofiletypes: item['geo'] = 'x' self.writerdata.writerow(item) self.writer.writerow(item)", "self.fileoutall = domain + \".csv\" self.fileoutdata = domain + \".data.csv\" self.allowed_domains = [self.domain]", "item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And is it one of our special geo", "domain self.fileoutall = domain + \".csv\" self.fileoutdata = domain + \".data.csv\" self.allowed_domains =", "+ domain + \"/\", ] #File types to search for (non-geo); list so", "the page we are on (parent) parent_url = response.url #Get all links sites", "\"json\" and we are dealing with DKAN, then we are probably dealing with", "text in textbits: thetext = text.xpath('text()').extract() if (len(thetext) > 0): item['URL_Text'] += thetext[0]", "def __init__(self, domain=None, *a, **kw): super(DataSpider, self).__init__(*a, **kw) self.domain = domain self.fileoutall =", "parent_title #Is it a file (does it have any of the extensions (including", "and it isn't a repetition, use it if (len(directText) > 0) and (directText", "metautils from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import", "= response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not applicable item['URL_Text'] = unicode('', 'utf-8') if", "+= thetext[0] #Then the actual text directText = site.xpath('text()').extract() #If there's something there", "u'' item['URL_PARENT'] = parent_url item['Title_PARENT'] = parent_title #Is it a file (does it", "item['Format'] = ext[1:len(ext)] #And is it one of our special geo filetypes? 
if", "site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get ALL text", "= unicode('', 'utf-8') for text in textbits: thetext = text.xpath('text()').extract() if (len(thetext) >", "( # Extract all links and parse them with the spider's method parse_page", "(directText != thetext): item['URL_Text'] += directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If", "it have any of the extensions (including the '.' in the filename, #then", "dealing with DKAN, then we are probably dealing with an API item description", "inside the link #First any sub-elements like <span> textbits = site.xpath('child::node()') item['URL_Text'] =", "with an API item description and not a file if (item['URL_Dateiname'].upper() == item['Format'])", "item['geo'] = u'' item['URL_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden' item['Title_PARENT']", "we can extend self.filetypes = ['.' 
+ ft for ft in metautils.fileformats] #Geographic", "thetext = text.xpath('text()').extract() if (len(thetext) > 0): item['URL_Text'] += thetext[0] #Then the actual", "item['URL_Text'] = unicode('', 'utf-8') for text in textbits: thetext = text.xpath('text()').extract() if (len(thetext)", "= unicode(self.domain, 'utf-8') #Not applicable item['URL_Text'] = unicode('', 'utf-8') if ('Content-Disposition' in response.headers):", "if ('Content-Disposition' in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format']", "ext in self.filetypes: if ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And is", "item['Title_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done return []", "if (len(title_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = title_text[0] alt_text = site.xpath('@alt').extract() if", "\"...\" def parse_page(self, response): for ext in self.filetypes: if (ext[1:] in response.headers['Content-Type'].upper() or", "self.writerdata.writerow(item) #Done return [] if ('Content-Type' in response.headers and 'text/html' not in response.headers['Content-Type']):", "unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if we just have", "self.geofiletypes = ['.' + ft for ft in metautils.geoformats] #Combined list to search", "\".csv\" self.fileoutdata = domain + \".data.csv\" self.allowed_domains = [self.domain] self.start_urls = [ \"http://www.\"", "extensions (including the '.' in the filename, #then remove the '.' for ext", "e.g. 
\"json\" and we are dealing with DKAN, then we are probably dealing", "item['Format'] = u'Not interesting' item['geo'] = u'' item['URL_PARENT'] = parent_url item['Title_PARENT'] = parent_title", "SgmlLinkExtractor from scrapy.selector import Selector from dirbot.items import Website class DataSpider(CrawlSpider): name =", "+ \"/\", ] #File types to search for (non-geo); list so that we", "u'' item['URL_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden' item['Title_PARENT'] = u'Nicht", "#If there's something there and it isn't a repetition, use it if (len(directText)", "!= thetext): item['URL_Text'] += directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If that", "(does it have any of the extensions (including the '.' in the filename,", "of our special geo filetypes? if ext in self.geofiletypes: item['geo'] = 'x' self.writerdata.writerow(item)", "text of everything inside the link #First any sub-elements like <span> textbits =", "moeglich kann aber nachtraeglich ermittelt werden' item['Title_PARENT'] = u'Nicht moeglich kann aber nachtraeglich", "(item['URL_Text'] == u''): item['URL_Datei'] = title_text[0] alt_text = site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text']", "u'Nicht moeglich kann aber nachtraeglich ermittelt werden' item['Title_PARENT'] = u'Nicht moeglich kann aber", "file (does it have any of the extensions (including the '.' in the", "= unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if we just have e.g. 
\"json\" and we", "== u''): item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting' item['geo']", "return [] if ('Content-Type' in response.headers and 'text/html' not in response.headers['Content-Type']): print \"Not", "domain + \".csv\" self.fileoutdata = domain + \".data.csv\" self.allowed_domains = [self.domain] self.start_urls =", "so that we can extend self.filetypes = ['.' + ft for ft in", "domain=None, *a, **kw): super(DataSpider, self).__init__(*a, **kw) self.domain = domain self.fileoutall = domain +", "= 'x' else: item['geo'] = u'' item['URL_PARENT'] = u'Nicht moeglich kann aber nachtraeglich", "= unicode(self.domain, 'utf-8') #Get ALL text of everything inside the link #First any", "= domain + \".csv\" self.fileoutdata = domain + \".data.csv\" self.allowed_domains = [self.domain] self.start_urls", "ermittelt werden' self.writerdata.writerow(item) #Done return [] if ('Content-Type' in response.headers and 'text/html' not", "got us nothing, then look at the title and alt elements title_text =", "'.' for ext in self.filetypes: if ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)]", "+ domain + \"...\" def parse_page(self, response): for ext in self.filetypes: if (ext[1:]", "Selector(response) #Title of the page we are on (this will be the 'parent')", "= title_text[0] alt_text = site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] =", "('Content-Type' in response.headers and 'text/html' not in response.headers['Content-Type']): print \"Not HTML or anything", "dirbot.items import Website class DataSpider(CrawlSpider): name = \"data\" rules = ( # Extract", "in metautils.fileformats] #Geographic file types self.geofiletypes = ['.' 
+ ft for ft in", "parent_title[0] #URL of the page we are on (parent) parent_url = response.url #Get", "for ft in metautils.fileformats] #Geographic file types self.geofiletypes = ['.' + ft for", "like <span> textbits = site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8') for text in textbits:", "in sites: item = Website() item['URL_Datei'] = unicode('', 'utf-8') url_file = site.xpath('@href').extract() if", "self.filetypes = ['.' + ft for ft in metautils.fileformats] #Geographic file types self.geofiletypes", "self.domain = domain self.fileoutall = domain + \".csv\" self.fileoutdata = domain + \".data.csv\"", "('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'),", "else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if we just have e.g. \"json\"", "of interest, giving up\" print response.headers return [] #Otherwise, its html and we", "response.headers and ext in response.headers['Content-Disposition'].upper())): print \"Detected a downloadable, generated file\" item =", "self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print \"Searching \" +", "#Better for searching later self.filetypes = tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname',", "it a file (does it have any of the extensions (including the '.'", "super(DataSpider, self).__init__(*a, **kw) self.domain = domain self.fileoutall = domain + \".csv\" self.fileoutdata =", "thetext[0] #Then the actual text directText = site.xpath('text()').extract() #If there's something there and", "item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If that got us nothing, then look", "+ \"...\" def parse_page(self, response): for 
ext in self.filetypes: if (ext[1:] in response.headers['Content-Type'].upper()", "(item['URL_Text'] == u''): item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting'", "scrapy.selector import Selector from dirbot.items import Website class DataSpider(CrawlSpider): name = \"data\" rules", "at the title and alt elements title_text = site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text']", "CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector from dirbot.items import", "item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not applicable item['URL_Text'] = unicode('', 'utf-8') if ('Content-Disposition' in", "be the 'parent') parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title = parent_title[0] #URL of", "something there and it isn't a repetition, use it if (len(directText) > 0)", "== item['Format']) and 'node' in item['URL_Datei']: return [] if (ext in self.geofiletypes): item['geo']", "item['URL_Text'] += directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip() #If that got us", "for ext in self.filetypes: if ext in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And", "then we are probably dealing with an API item description and not a", "# Extract all links and parse them with the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True),", "= parent_title #Is it a file (does it have any of the extensions", "special geo filetypes? 
if ext in self.geofiletypes: item['geo'] = 'x' self.writerdata.writerow(item) self.writer.writerow(item) #items.append(item)", "for ft in metautils.geoformats] #Combined list to search for at first self.filetypes.extend(self.geofiletypes) #Better", "in metautils.geoformats] #Combined list to search for at first self.filetypes.extend(self.geofiletypes) #Better for searching", "metautils.geoformats] #Combined list to search for at first self.filetypes.extend(self.geofiletypes) #Better for searching later", "and not a file if (item['URL_Dateiname'].upper() == item['Format']) and 'node' in item['URL_Datei']: return", "metautils.fileformats] #Geographic file types self.geofiletypes = ['.' + ft for ft in metautils.geoformats]", "= tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer", "'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\",", "not in response.headers['Content-Type']): print \"Not HTML or anything else of interest, giving up\"", "if (len(alt_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format']", "the link #First any sub-elements like <span> textbits = site.xpath('child::node()') item['URL_Text'] = unicode('',", "the page we are on (this will be the 'parent') parent_title = sel.xpath('//title/text()').extract()", "url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get ALL text of everything inside the link", "them with the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self, domain=None, *a, **kw):", "alt_text = site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = alt_text[0] 
item['URL_Dateiname']", "applicable item['URL_Text'] = unicode('', 'utf-8') if ('Content-Disposition' in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8')", "from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector", "textbits: thetext = text.xpath('text()').extract() if (len(thetext) > 0): item['URL_Text'] += thetext[0] #Then the", "item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if we just have e.g. \"json\" and", "and 'node' in item['URL_Datei']: return [] if (ext in self.geofiletypes): item['geo'] = 'x'", "= Website() item['URL_Datei'] = unicode('', 'utf-8') url_file = site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] =", "the page sel = Selector(response) #Title of the page we are on (this", "u''): item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting' item['geo'] =", "unicode('', 'utf-8') for text in textbits: thetext = text.xpath('text()').extract() if (len(thetext) > 0):", "#if we just have e.g. 
\"json\" and we are dealing with DKAN, then", "unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print \"Searching \" + domain + \"...\" def", "if (ext in self.geofiletypes): item['geo'] = 'x' else: item['geo'] = u'' item['URL_PARENT'] =", "we process all links on the page sel = Selector(response) #Title of the", "its html and we process all links on the page sel = Selector(response)", "**kw) self.domain = domain self.fileoutall = domain + \".csv\" self.fileoutdata = domain +", "and we process all links on the page sel = Selector(response) #Title of", "(this will be the 'parent') parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title = parent_title[0]", "anything else of interest, giving up\" print response.headers return [] #Otherwise, its html", "the 'parent') parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title = parent_title[0] #URL of the", "Extract all links and parse them with the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), )", "page sel = Selector(response) #Title of the page we are on (this will", "Selector from dirbot.items import Website class DataSpider(CrawlSpider): name = \"data\" rules = (", "Website class DataSpider(CrawlSpider): name = \"data\" rules = ( # Extract all links", "use it if (len(directText) > 0) and (directText != thetext): item['URL_Text'] += directText[0]", "'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'),", "HTML or anything else of interest, giving up\" print response.headers return [] #Otherwise,", "import unicodecsv import metautils from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor", "in self.filetypes: 
if (ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers and ext in", "ft in metautils.geoformats] #Combined list to search for at first self.filetypes.extend(self.geofiletypes) #Better for", "return [] if (ext in self.geofiletypes): item['geo'] = 'x' else: item['geo'] = u''", "for (non-geo); list so that we can extend self.filetypes = ['.' + ft", "item['URL_Text'] = unicode('', 'utf-8') if ('Content-Disposition' in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else:", "= [ \"http://www.\" + domain + \"/\", ] #File types to search for", "have any of the extensions (including the '.' in the filename, #then remove", "#Not applicable item['URL_Text'] = unicode('', 'utf-8') if ('Content-Disposition' in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'],", "*a, **kw): super(DataSpider, self).__init__(*a, **kw) self.domain = domain self.fileoutall = domain + \".csv\"", "the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self, domain=None, *a, **kw): super(DataSpider, self).__init__(*a,", "encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print \"Searching \"", "site.xpath('text()').extract() #If there's something there and it isn't a repetition, use it if", "all links on the page sel = Selector(response) #Title of the page we", "Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self, domain=None, *a, **kw): super(DataSpider, self).__init__(*a, **kw) self.domain = domain", "= unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if we just", "> 0): item['URL_Text'] += thetext[0] #Then the actual text directText = 
site.xpath('text()').extract() #If", "\"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print \"Searching \" + domain + \"...\" def parse_page(self,", "actual text directText = site.xpath('text()').extract() #If there's something there and it isn't a", "parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self, domain=None, *a, **kw): super(DataSpider, self).__init__(*a, **kw) self.domain =", "#And is it one of our special geo filetypes? if ext in self.geofiletypes:", "#items = [] for site in sites: item = Website() item['URL_Datei'] = unicode('',", "site in sites: item = Website() item['URL_Datei'] = unicode('', 'utf-8') url_file = site.xpath('@href').extract()", "u'Nicht moeglich kann aber nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done return [] if ('Content-Type'", "rules = ( # Extract all links and parse them with the spider's", "ext in self.filetypes: if (ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers and ext", "textbits = site.xpath('child::node()') item['URL_Text'] = unicode('', 'utf-8') for text in textbits: thetext =", "sites = sel.xpath('//body//a') #items = [] for site in sites: item = Website()", "response.headers return [] #Otherwise, its html and we process all links on the", "= sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title = parent_title[0] #URL of the page we are", "giving up\" print response.headers return [] #Otherwise, its html and we process all", "= parent_title[0] #URL of the page we are on (parent) parent_url = response.url", "= unicode('', 'utf-8') url_file = site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL'] =", "can extend self.filetypes = ['.' 
+ ft for ft in metautils.fileformats] #Geographic file", "the actual text directText = site.xpath('text()').extract() #If there's something there and it isn't", "= domain + \".data.csv\" self.allowed_domains = [self.domain] self.start_urls = [ \"http://www.\" + domain", "that got us nothing, then look at the title and alt elements title_text", "in item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And is it one of our special", "if (ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers and ext in response.headers['Content-Disposition'].upper())): print", "item['geo'] = 'x' else: item['geo'] = u'' item['URL_PARENT'] = u'Nicht moeglich kann aber", "or anything else of interest, giving up\" print response.headers return [] #Otherwise, its", "self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print \"Searching \" + domain +", "just have e.g. \"json\" and we are dealing with DKAN, then we are", "self.filetypes: if (ext[1:] in response.headers['Content-Type'].upper() or ('Content-Disposition' in response.headers and ext in response.headers['Content-Disposition'].upper())):", "'text/html' not in response.headers['Content-Type']): print \"Not HTML or anything else of interest, giving", "method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self, domain=None, *a, **kw): super(DataSpider, self).__init__(*a, **kw) self.domain", "it one of our special geo filetypes? 
if ext in self.geofiletypes: item['geo'] =", "DataSpider(CrawlSpider): name = \"data\" rules = ( # Extract all links and parse", "import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector from dirbot.items", "unicode('', 'utf-8') if ('Content-Disposition' in response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] =", "= ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\",", "and (directText != thetext): item['URL_Text'] += directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\", \" \").replace(\"\\n\", \"\").strip()", "DKAN, then we are probably dealing with an API item description and not", "item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get ALL text of everything inside the link #First", "moeglich kann aber nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done return [] if ('Content-Type' in", "#Then the actual text directText = site.xpath('text()').extract() #If there's something there and it", "\"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print \"Searching", "unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting' item['geo'] = u'' item['URL_PARENT'] = parent_url item['Title_PARENT'] =", "are probably dealing with an API item description and not a file if", "[] for site in sites: item = Website() item['URL_Datei'] = unicode('', 'utf-8') url_file", "\" \").replace(\"\\n\", \"\").strip() #If that got us nothing, then look at the title", "in response.headers['Content-Disposition'].upper())): print \"Detected a downloadable, generated file\" item = Website() item['URL_Datei'] =", "in self.filetypes: if ext in 
item['URL_Dateiname'].encode('ascii', errors='ignore').upper(): item['Format'] = ext[1:len(ext)] #And is it", "if (len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Get ALL text of", "werden' self.writerdata.writerow(item) #Done return [] if ('Content-Type' in response.headers and 'text/html' not in", "a repetition, use it if (len(directText) > 0) and (directText != thetext): item['URL_Text']", "parent_title = parent_title[0] #URL of the page we are on (parent) parent_url =", "in response.headers and 'text/html' not in response.headers['Content-Type']): print \"Not HTML or anything else", "the extensions (including the '.' in the filename, #then remove the '.' for", "= alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting' item['geo'] = u'' item['URL_PARENT']", "werden' item['Title_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done return", "= u'Not interesting' item['geo'] = u'' item['URL_PARENT'] = parent_url item['Title_PARENT'] = parent_title #Is", "#Otherwise, its html and we process all links on the page sel =", "searching later self.filetypes = tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo',", "and 'text/html' not in response.headers['Content-Type']): print \"Not HTML or anything else of interest,", "with DKAN, then we are probably dealing with an API item description and", "= sel.xpath('//body//a') #items = [] for site in sites: item = Website() item['URL_Datei']", "class DataSpider(CrawlSpider): name = \"data\" rules = ( # Extract all links and", "self.filetypes = tuple(self.filetypes) self.fields = ('Stadt_URL', 'URL_Datei', 'URL_Text', 'URL_Dateiname', 'Format', 'geo', 'URL_PARENT', 'Title_PARENT')", "= unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader() print 
\"Searching \" + domain + \"...\"", "import Selector from dirbot.items import Website class DataSpider(CrawlSpider): name = \"data\" rules =", "self.geofiletypes): item['geo'] = 'x' else: item['geo'] = u'' item['URL_PARENT'] = u'Nicht moeglich kann", "response.headers['Content-Type']): print \"Not HTML or anything else of interest, giving up\" print response.headers", "sites: item = Website() item['URL_Datei'] = unicode('', 'utf-8') url_file = site.xpath('@href').extract() if (len(url_file)>0):", "parent_url item['Title_PARENT'] = parent_title #Is it a file (does it have any of", "item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if we", "unicode('', 'utf-8') url_file = site.xpath('@href').extract() if (len(url_file)>0): item['URL_Datei'] = url_file[0] item['Stadt_URL'] = unicode(self.domain,", "Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector from dirbot.items import Website", "will be the 'parent') parent_title = sel.xpath('//title/text()').extract() if (len(parent_title)>0): parent_title = parent_title[0] #URL", "return [] #Otherwise, its html and we process all links on the page", "all links sites = sel.xpath('//body//a') #items = [] for site in sites: item", "(parent) parent_url = response.url #Get all links sites = sel.xpath('//body//a') #items = []", "extend self.filetypes = ['.' 
+ ft for ft in metautils.fileformats] #Geographic file types", "encoding='utf-8'), self.fields) self.writerdata.writeheader() print \"Searching \" + domain + \"...\" def parse_page(self, response):", "page we are on (this will be the 'parent') parent_title = sel.xpath('//title/text()').extract() if", "= Website() item['URL_Datei'] = response.url item['Stadt_URL'] = unicode(self.domain, 'utf-8') #Not applicable item['URL_Text'] =", "#Combined list to search for at first self.filetypes.extend(self.geofiletypes) #Better for searching later self.filetypes", "[] if (ext in self.geofiletypes): item['geo'] = 'x' else: item['geo'] = u'' item['URL_PARENT']", "the title and alt elements title_text = site.xpath('@title').extract() if (len(title_text)>0) and (item['URL_Text'] ==", "u''): item['URL_Datei'] = title_text[0] alt_text = site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text'] == u''):", "aber nachtraeglich ermittelt werden' self.writerdata.writerow(item) #Done return [] if ('Content-Type' in response.headers and", "= \"data\" rules = ( # Extract all links and parse them with", "(len(alt_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] =", "self).__init__(*a, **kw) self.domain = domain self.fileoutall = domain + \".csv\" self.fileoutdata = domain", "= unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting' item['geo'] = u'' item['URL_PARENT'] = parent_url item['Title_PARENT']", "html and we process all links on the page sel = Selector(response) #Title", "(len(directText) > 0) and (directText != thetext): item['URL_Text'] += directText[0] item['URL_Text'] = item['URL_Text'].replace(\"\\t\",", "filename, #then remove the '.' 
for ext in self.filetypes: if ext in item['URL_Dateiname'].encode('ascii',", "to search for at first self.filetypes.extend(self.geofiletypes) #Better for searching later self.filetypes = tuple(self.filetypes)", "for text in textbits: thetext = text.xpath('text()').extract() if (len(thetext) > 0): item['URL_Text'] +=", "else: item['geo'] = u'' item['URL_PARENT'] = u'Nicht moeglich kann aber nachtraeglich ermittelt werden'", "#If that got us nothing, then look at the title and alt elements", "'Format', 'geo', 'URL_PARENT', 'Title_PARENT') self.writer = unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata =", "if (item['URL_Dateiname'].upper() == item['Format']) and 'node' in item['URL_Datei']: return [] if (ext in", "and (item['URL_Text'] == u''): item['URL_Datei'] = title_text[0] alt_text = site.xpath('@alt').extract() if (len(alt_text)>0) and", "= unicodecsv.DictWriter(open(self.fileoutall, \"wb\", encoding='utf-8'), self.fields) self.writer.writeheader() self.writerdata = unicodecsv.DictWriter(open(self.fileoutdata, \"wb\", encoding='utf-8'), self.fields) self.writerdata.writeheader()", "in item['URL_Datei']: return [] if (ext in self.geofiletypes): item['geo'] = 'x' else: item['geo']", "the '.' in the filename, #then remove the '.' 
for ext in self.filetypes:", "parse them with the spider's method parse_page Rule(SgmlLinkExtractor(),callback='parse_page',follow=True), ) def __init__(self, domain=None, *a,", "site.xpath('@alt').extract() if (len(alt_text)>0) and (item['URL_Text'] == u''): item['URL_Datei'] = alt_text[0] item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1]", "item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = u'Not interesting' item['geo'] = u'' item['URL_PARENT'] = parent_url", "response.headers): item['URL_Dateiname'] = unicode(response.headers['Content-Disposition'], 'utf-8') else: item['URL_Dateiname'] = unicode(item['URL_Datei']).split('/')[-1] item['Format'] = ext[1:] #if", "\"\").strip() #If that got us nothing, then look at the title and alt", "('Content-Disposition' in response.headers and ext in response.headers['Content-Disposition'].upper())): print \"Detected a downloadable, generated file\"", "any of the extensions (including the '.' in the filename, #then remove the", "= u'' item['URL_PARENT'] = parent_url item['Title_PARENT'] = parent_title #Is it a file (does", "= site.xpath('text()').extract() #If there's something there and it isn't a repetition, use it", "\"http://www.\" + domain + \"/\", ] #File types to search for (non-geo); list" ]
[ "[self.Obs] def addAgent(self, agent, random = False): raise NotImplementedError def step(self, actions): assert", "== 1, \"Converted Gym environments can not handle multiple agents\" self.Agent = agents[0]", "def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for _ in", "def __str__(self): return \"GymEnv(%s)\" % (self.Env,) def randomStates(self, n): space = self.RandomObservationSpace or", "self.T = self.TLimit = tlimit def reset(self, random=False): self.T = self.TLimit state =", "self.TLimit = tlimit self.Env = env self.RandomObservationSpace = random_observation_space self.T = self.TLimit =", "-= 1 if self.T <= 0: done = True return obs, reward, done,", "\"Converted Gym environments can not handle multiple agents\" self.Agent = agents[0] obs =", "def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.observation_space =", "return getattr(self.Env, name) class TimedGymEnv(object): def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str):", "def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.TLimit =", "n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for _ in xrange(n)]) def", "self.T = self.TLimit return [self.Obs] def addAgent(self, agent, random = False): raise NotImplementedError", "self.T = self.TLimit state = self.Env.reset() if random: state = self.randomStates(1)[0] self.Env.state =", "state = self.randomStates(1)[0] self.Env.state = state return state def step(self, action): obs, reward,", "= self.Env.step(actions[0][1]) if self.T is not None: self.T -= 1 self.Done = self.Done", "str): env = gym.make(env) self.observation_space = env.observation_space self.action_space = env.action_space self.TLimit = tlimit", "= agents[0] obs = self.Env.reset() if random and 
self.RandomObservationSpace is not None: obs", "= self.TLimit = tlimit def reset(self, random=False): self.T = self.TLimit state = self.Env.reset()", "self.action_space = env.action_space self.TLimit = tlimit self.Env = env self.Obs = None self.Reward", "= tlimit def reset(self, random=False): self.T = self.TLimit state = self.Env.reset() if random:", "env.action_space self.TLimit = tlimit self.Env = env self.Obs = None self.Reward = None", "step(self, actions): assert len(actions) == 1, \"Converted Gym environments can not handle multiple", "def addAgent(self, agent, random = False): raise NotImplementedError def step(self, actions): assert len(actions)", "MultiGymEnv(MultiEnv): # # Convert 1-agent Gym environment into a multi-agent environment # NAgents", "1, \"Converted Gym environments can not handle multiple agents\" self.Agent = agents[0] obs", "state def step(self, action): obs, reward, done, info = self.Env.step(action) if self.T is", "reward, done, info = self.Env.step(action) if self.T is not None: self.T -= 1", "return False, [(self.Agent, self.Obs, self.Reward, self.Done, self.Info)] def __getattr__(self, name): return getattr(self.Env, name)", "def step(self, action): obs, reward, done, info = self.Env.step(action) if self.T is not", "= False self.Info = None self.RandomObservationSpace = random_observation_space def __str__(self): return \"GymEnv(%s)\" %", "= obs self.Done = False self.T = self.TLimit return [self.Obs] def addAgent(self, agent,", "NAgents = 1 def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env =", "def step(self, actions): assert len(actions) == 1, \"Converted Gym environments can not handle", "not None: obs = self.RandomObservationSpace.sample() self.Env.state = obs self.Obs = obs self.Done =", "isinstance(env, str): env = gym.make(env) self.TLimit = tlimit self.Env = env self.RandomObservationSpace =", "self.Reward, self.Done, self.Info = self.Env.step(actions[0][1]) if self.T is not 
None: self.T -= 1", "= random_observation_space def __str__(self): return \"GymEnv(%s)\" % (self.Env,) def randomStates(self, n): space =", "Gym environment into a multi-agent environment # NAgents = 1 def __init__(self, env,", "self.Env.state = state return state def step(self, action): obs, reward, done, info =", "into a multi-agent environment # NAgents = 1 def __init__(self, env, tlimit=None, random_observation_space=None):", "self.T is not None: self.T -= 1 if self.T <= 0: done =", "in xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def randomAction(self):", "__init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.TLimit = tlimit", "in xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def reset(self,", "self.Obs, self.Reward, self.Done, self.Info = self.Env.step(actions[0][1]) if self.T is not None: self.T -=", "or (self.T <= 0) def feedback(self): return False, [(self.Agent, self.Obs, self.Reward, self.Done, self.Info)]", "return \"GymEnv(%s)\" % (self.Env,) def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return", "self.T <= 0: done = True return obs, reward, done, info def randomStates(self,", "MultiEnv(object): pass class MultiGymEnv(MultiEnv): # # Convert 1-agent Gym environment into a multi-agent", "xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def reset(self, agents,", "self.Obs = obs self.Done = False self.T = self.TLimit return [self.Obs] def addAgent(self,", "for _ in xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)])", "= 1 def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env)", "= env self.RandomObservationSpace = random_observation_space self.T = 
self.TLimit = tlimit def reset(self, random=False):", "self.Info = None self.RandomObservationSpace = random_observation_space def __str__(self): return \"GymEnv(%s)\" % (self.Env,) def", "random_observation_space self.T = self.TLimit = tlimit def reset(self, random=False): self.T = self.TLimit state", "a multi-agent environment # NAgents = 1 def __init__(self, env, tlimit=None, random_observation_space=None): if", "not handle multiple agents\" self.Obs, self.Reward, self.Done, self.Info = self.Env.step(actions[0][1]) if self.T is", "= self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for _ in xrange(n)]) def randomActions(self, n):", "getattr(self.Env, name) class TimedGymEnv(object): def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env", "= self.randomStates(1)[0] self.Env.state = state return state def step(self, action): obs, reward, done,", "1, \"Converted Gym environments can not handle multiple agents\" self.Obs, self.Reward, self.Done, self.Info", "random: state = self.randomStates(1)[0] self.Env.state = state return state def step(self, action): obs,", "self.Obs, self.Reward, self.Done, self.Info)] def __getattr__(self, name): return getattr(self.Env, name) class TimedGymEnv(object): def", "(self.Env,) def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for _", "def reset(self, agents, random = False): assert len(agents) == 1, \"Converted Gym environments", "env.observation_space self.action_space = env.action_space self.TLimit = tlimit self.Env = env self.Obs = None", "random_observation_space def __str__(self): return \"GymEnv(%s)\" % (self.Env,) def randomStates(self, n): space = self.RandomObservationSpace", "self.TLimit return [self.Obs] def addAgent(self, agent, random = False): raise NotImplementedError def step(self,", "space = self.RandomObservationSpace or self.Env.observation_space return 
np.array([space.sample() for _ in xrange(n)]) def randomActions(self,", "random and self.RandomObservationSpace is not None: obs = self.RandomObservationSpace.sample() self.Env.state = obs self.Obs", "1 if self.T <= 0: done = True return obs, reward, done, info", "self.T -= 1 self.Done = self.Done or (self.T <= 0) def feedback(self): return", "np.array([space.sample() for _ in xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in", "return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def reset(self, agents, random = False): assert", "None: self.T -= 1 self.Done = self.Done or (self.T <= 0) def feedback(self):", "self.Info)] def __getattr__(self, name): return getattr(self.Env, name) class TimedGymEnv(object): def __init__(self, env, tlimit=None,", "np.array([self.Env.action_space.sample() for _ in xrange(n)]) def randomAction(self): return self.randomActions(1)[0] def __getattr__(self, name): return", "randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def reset(self, agents, random =", "n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def reset(self, agents, random = False):", "tlimit self.Env = env self.RandomObservationSpace = random_observation_space self.T = self.TLimit = tlimit def", "= False self.T = self.TLimit return [self.Obs] def addAgent(self, agent, random = False):", "env = gym.make(env) self.TLimit = tlimit self.Env = env self.RandomObservationSpace = random_observation_space self.T", "self.T -= 1 if self.T <= 0: done = True return obs, reward,", "\"GymEnv(%s)\" % (self.Env,) def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample()", "gym.make(env) self.observation_space = env.observation_space self.action_space = env.action_space self.TLimit = tlimit self.Env = env", "random_observation_space=None): if isinstance(env, str): env = gym.make(env) 
self.observation_space = env.observation_space self.action_space = env.action_space", "self.Agent = agents[0] obs = self.Env.reset() if random and self.RandomObservationSpace is not None:", "random=False): self.T = self.TLimit state = self.Env.reset() if random: state = self.randomStates(1)[0] self.Env.state", "= None self.Done = False self.Info = None self.RandomObservationSpace = random_observation_space def __str__(self):", "info def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for _", "is not None: self.T -= 1 self.Done = self.Done or (self.T <= 0)", "__str__(self): return \"GymEnv(%s)\" % (self.Env,) def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space", "= None self.Reward = None self.Done = False self.Info = None self.RandomObservationSpace =", "state return state def step(self, action): obs, reward, done, info = self.Env.step(action) if", "environment # NAgents = 1 def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str):", "<= 0: done = True return obs, reward, done, info def randomStates(self, n):", "env self.Obs = None self.Reward = None self.Done = False self.Info = None", "environments can not handle multiple agents\" self.Obs, self.Reward, self.Done, self.Info = self.Env.step(actions[0][1]) if", "self.Done, self.Info)] def __getattr__(self, name): return getattr(self.Env, name) class TimedGymEnv(object): def __init__(self, env,", "self.RandomObservationSpace.sample() self.Env.state = obs self.Obs = obs self.Done = False self.T = self.TLimit", "= env.action_space self.TLimit = tlimit self.Env = env self.Obs = None self.Reward =", "def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def randomAction(self): return self.randomActions(1)[0]", "agents\" self.Agent = agents[0] obs = self.Env.reset() if random and self.RandomObservationSpace is not", 
"self.Env.step(actions[0][1]) if self.T is not None: self.T -= 1 self.Done = self.Done or", "if self.T <= 0: done = True return obs, reward, done, info def", "env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.TLimit = tlimit self.Env", "return obs, reward, done, info def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space", "return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def randomAction(self): return self.randomActions(1)[0] def __getattr__(self, name):", "self.RandomObservationSpace = random_observation_space def __str__(self): return \"GymEnv(%s)\" % (self.Env,) def randomStates(self, n): space", "None self.Reward = None self.Done = False self.Info = None self.RandomObservationSpace = random_observation_space", "gym, random, numpy as np class MultiEnv(object): pass class MultiGymEnv(MultiEnv): # # Convert", "name) class TimedGymEnv(object): def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env =", "random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.TLimit = tlimit self.Env = env", "numpy as np class MultiEnv(object): pass class MultiGymEnv(MultiEnv): # # Convert 1-agent Gym", "is not None: obs = self.RandomObservationSpace.sample() self.Env.state = obs self.Obs = obs self.Done", "= gym.make(env) self.TLimit = tlimit self.Env = env self.RandomObservationSpace = random_observation_space self.T =", "name): return getattr(self.Env, name) class TimedGymEnv(object): def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env,", "if random: state = self.randomStates(1)[0] self.Env.state = state return state def step(self, action):", "= state return state def step(self, action): obs, reward, done, info = self.Env.step(action)", "multi-agent environment # NAgents = 1 def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env,", 
"len(agents) == 1, \"Converted Gym environments can not handle multiple agents\" self.Agent =", "not None: self.T -= 1 if self.T <= 0: done = True return", "if self.T is not None: self.T -= 1 self.Done = self.Done or (self.T", "self.TLimit state = self.Env.reset() if random: state = self.randomStates(1)[0] self.Env.state = state return", "done = True return obs, reward, done, info def randomStates(self, n): space =", "addAgent(self, agent, random = False): raise NotImplementedError def step(self, actions): assert len(actions) ==", "env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.observation_space = env.observation_space self.action_space", "obs = self.RandomObservationSpace.sample() self.Env.state = obs self.Obs = obs self.Done = False self.T", "isinstance(env, str): env = gym.make(env) self.observation_space = env.observation_space self.action_space = env.action_space self.TLimit =", "env = gym.make(env) self.observation_space = env.observation_space self.action_space = env.action_space self.TLimit = tlimit self.Env", "self.Env.reset() if random and self.RandomObservationSpace is not None: obs = self.RandomObservationSpace.sample() self.Env.state =", "agent, random = False): raise NotImplementedError def step(self, actions): assert len(actions) == 1,", "self.Done, self.Info = self.Env.step(actions[0][1]) if self.T is not None: self.T -= 1 self.Done", "environments can not handle multiple agents\" self.Agent = agents[0] obs = self.Env.reset() if", "assert len(agents) == 1, \"Converted Gym environments can not handle multiple agents\" self.Agent", "= self.Env.step(action) if self.T is not None: self.T -= 1 if self.T <=", "self.randomStates(1)[0] self.Env.state = state return state def step(self, action): obs, reward, done, info", "environment into a multi-agent environment # NAgents = 1 def __init__(self, env, tlimit=None,", "tlimit self.Env = env self.Obs = None self.Reward = None self.Done = False", "or 
self.Env.observation_space return np.array([space.sample() for _ in xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample()", "xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def randomAction(self): return", "= random_observation_space self.T = self.TLimit = tlimit def reset(self, random=False): self.T = self.TLimit", "import gym, random, numpy as np class MultiEnv(object): pass class MultiGymEnv(MultiEnv): # #", "str): env = gym.make(env) self.TLimit = tlimit self.Env = env self.RandomObservationSpace = random_observation_space", "step(self, action): obs, reward, done, info = self.Env.step(action) if self.T is not None:", "False): assert len(agents) == 1, \"Converted Gym environments can not handle multiple agents\"", "randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for _ in xrange(n)])", "= True return obs, reward, done, info def randomStates(self, n): space = self.RandomObservationSpace", "pass class MultiGymEnv(MultiEnv): # # Convert 1-agent Gym environment into a multi-agent environment", "= env.observation_space self.action_space = env.action_space self.TLimit = tlimit self.Env = env self.Obs =", "self.Env.observation_space return np.array([space.sample() for _ in xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for", "= False): assert len(agents) == 1, \"Converted Gym environments can not handle multiple", "1-agent Gym environment into a multi-agent environment # NAgents = 1 def __init__(self,", "handle multiple agents\" self.Agent = agents[0] obs = self.Env.reset() if random and self.RandomObservationSpace", "obs, reward, done, info = self.Env.step(action) if self.T is not None: self.T -=", "as np class MultiEnv(object): pass class MultiGymEnv(MultiEnv): # # Convert 1-agent Gym environment", "and self.RandomObservationSpace is not None: obs = 
self.RandomObservationSpace.sample() self.Env.state = obs self.Obs =", "[(self.Agent, self.Obs, self.Reward, self.Done, self.Info)] def __getattr__(self, name): return getattr(self.Env, name) class TimedGymEnv(object):", "len(actions) == 1, \"Converted Gym environments can not handle multiple agents\" self.Obs, self.Reward,", "def __getattr__(self, name): return getattr(self.Env, name) class TimedGymEnv(object): def __init__(self, env, tlimit=None, random_observation_space=None):", "state = self.Env.reset() if random: state = self.randomStates(1)[0] self.Env.state = state return state", "obs, reward, done, info def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return", "n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def randomAction(self): return self.randomActions(1)[0] def __getattr__(self,", "None self.RandomObservationSpace = random_observation_space def __str__(self): return \"GymEnv(%s)\" % (self.Env,) def randomStates(self, n):", "__init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.observation_space = env.observation_space", "feedback(self): return False, [(self.Agent, self.Obs, self.Reward, self.Done, self.Info)] def __getattr__(self, name): return getattr(self.Env,", "# # Convert 1-agent Gym environment into a multi-agent environment # NAgents =", "xrange(n)]) def reset(self, agents, random = False): assert len(agents) == 1, \"Converted Gym", "False): raise NotImplementedError def step(self, actions): assert len(actions) == 1, \"Converted Gym environments", "None: self.T -= 1 if self.T <= 0: done = True return obs,", "= env self.Obs = None self.Reward = None self.Done = False self.Info =", "done, info = self.Env.step(action) if self.T is not None: self.T -= 1 if", "can not handle multiple agents\" self.Agent = agents[0] obs = self.Env.reset() if random", "Gym environments can not handle multiple agents\" self.Agent = agents[0] 
obs = self.Env.reset()", "False self.T = self.TLimit return [self.Obs] def addAgent(self, agent, random = False): raise", "None: obs = self.RandomObservationSpace.sample() self.Env.state = obs self.Obs = obs self.Done = False", "agents[0] obs = self.Env.reset() if random and self.RandomObservationSpace is not None: obs =", "self.Env.state = obs self.Obs = obs self.Done = False self.T = self.TLimit return", "obs = self.Env.reset() if random and self.RandomObservationSpace is not None: obs = self.RandomObservationSpace.sample()", "<filename>kerlas/gym_env.py<gh_stars>0 import gym, random, numpy as np class MultiEnv(object): pass class MultiGymEnv(MultiEnv): #", "class TimedGymEnv(object): def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env)", "self.Env = env self.Obs = None self.Reward = None self.Done = False self.Info", "class MultiEnv(object): pass class MultiGymEnv(MultiEnv): # # Convert 1-agent Gym environment into a", "self.TLimit = tlimit def reset(self, random=False): self.T = self.TLimit state = self.Env.reset() if", "can not handle multiple agents\" self.Obs, self.Reward, self.Done, self.Info = self.Env.step(actions[0][1]) if self.T", "= self.Done or (self.T <= 0) def feedback(self): return False, [(self.Agent, self.Obs, self.Reward,", "= self.RandomObservationSpace.sample() self.Env.state = obs self.Obs = obs self.Done = False self.T =", "self.Done = self.Done or (self.T <= 0) def feedback(self): return False, [(self.Agent, self.Obs,", "= obs self.Obs = obs self.Done = False self.T = self.TLimit return [self.Obs]", "= self.TLimit state = self.Env.reset() if random: state = self.randomStates(1)[0] self.Env.state = state", "not None: self.T -= 1 self.Done = self.Done or (self.T <= 0) def", "None self.Done = False self.Info = None self.RandomObservationSpace = random_observation_space def __str__(self): return", "multiple agents\" self.Obs, self.Reward, self.Done, self.Info = 
self.Env.step(actions[0][1]) if self.T is not None:", "self.Reward, self.Done, self.Info)] def __getattr__(self, name): return getattr(self.Env, name) class TimedGymEnv(object): def __init__(self,", "agents, random = False): assert len(agents) == 1, \"Converted Gym environments can not", "assert len(actions) == 1, \"Converted Gym environments can not handle multiple agents\" self.Obs,", "Gym environments can not handle multiple agents\" self.Obs, self.Reward, self.Done, self.Info = self.Env.step(actions[0][1])", "\"Converted Gym environments can not handle multiple agents\" self.Obs, self.Reward, self.Done, self.Info =", "-= 1 self.Done = self.Done or (self.T <= 0) def feedback(self): return False,", "self.T is not None: self.T -= 1 self.Done = self.Done or (self.T <=", "__getattr__(self, name): return getattr(self.Env, name) class TimedGymEnv(object): def __init__(self, env, tlimit=None, random_observation_space=None): if", "= self.Env.reset() if random: state = self.randomStates(1)[0] self.Env.state = state return state def", "_ in xrange(n)]) def reset(self, agents, random = False): assert len(agents) == 1,", "obs self.Obs = obs self.Done = False self.T = self.TLimit return [self.Obs] def", "True return obs, reward, done, info def randomStates(self, n): space = self.RandomObservationSpace or", "= tlimit self.Env = env self.Obs = None self.Reward = None self.Done =", "self.TLimit = tlimit self.Env = env self.Obs = None self.Reward = None self.Done", "in xrange(n)]) def reset(self, agents, random = False): assert len(agents) == 1, \"Converted", "handle multiple agents\" self.Obs, self.Reward, self.Done, self.Info = self.Env.step(actions[0][1]) if self.T is not", "for _ in xrange(n)]) def randomAction(self): return self.randomActions(1)[0] def __getattr__(self, name): return getattr(self.Env,", "self.Reward = None self.Done = False self.Info = None self.RandomObservationSpace = random_observation_space def", "NotImplementedError def step(self, actions): assert 
len(actions) == 1, \"Converted Gym environments can not", "self.Env.reset() if random: state = self.randomStates(1)[0] self.Env.state = state return state def step(self,", "= self.TLimit return [self.Obs] def addAgent(self, agent, random = False): raise NotImplementedError def", "# NAgents = 1 def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env", "if self.T is not None: self.T -= 1 if self.T <= 0: done", "== 1, \"Converted Gym environments can not handle multiple agents\" self.Obs, self.Reward, self.Done,", "_ in xrange(n)]) def randomAction(self): return self.randomActions(1)[0] def __getattr__(self, name): return getattr(self.Env, name)", "class MultiGymEnv(MultiEnv): # # Convert 1-agent Gym environment into a multi-agent environment #", "# Convert 1-agent Gym environment into a multi-agent environment # NAgents = 1", "self.Info = self.Env.step(actions[0][1]) if self.T is not None: self.T -= 1 self.Done =", "tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.TLimit = tlimit self.Env =", "np class MultiEnv(object): pass class MultiGymEnv(MultiEnv): # # Convert 1-agent Gym environment into", "random = False): raise NotImplementedError def step(self, actions): assert len(actions) == 1, \"Converted", "self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for _ in xrange(n)]) def randomActions(self, n): return", "= False): raise NotImplementedError def step(self, actions): assert len(actions) == 1, \"Converted Gym", "gym.make(env) self.TLimit = tlimit self.Env = env self.RandomObservationSpace = random_observation_space self.T = self.TLimit", "= tlimit self.Env = env self.RandomObservationSpace = random_observation_space self.T = self.TLimit = tlimit", "def reset(self, random=False): self.T = self.TLimit state = self.Env.reset() if random: state =", "tlimit def reset(self, random=False): self.T = self.TLimit state = self.Env.reset() if random: 
state", "self.observation_space = env.observation_space self.action_space = env.action_space self.TLimit = tlimit self.Env = env self.Obs", "def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def reset(self, agents, random", "not handle multiple agents\" self.Agent = agents[0] obs = self.Env.reset() if random and", "self.Done = False self.Info = None self.RandomObservationSpace = random_observation_space def __str__(self): return \"GymEnv(%s)\"", "self.RandomObservationSpace = random_observation_space self.T = self.TLimit = tlimit def reset(self, random=False): self.T =", "if isinstance(env, str): env = gym.make(env) self.TLimit = tlimit self.Env = env self.RandomObservationSpace", "obs self.Done = False self.T = self.TLimit return [self.Obs] def addAgent(self, agent, random", "= self.Env.reset() if random and self.RandomObservationSpace is not None: obs = self.RandomObservationSpace.sample() self.Env.state", "TimedGymEnv(object): def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.TLimit", "actions): assert len(actions) == 1, \"Converted Gym environments can not handle multiple agents\"", "for _ in xrange(n)]) def reset(self, agents, random = False): assert len(agents) ==", "is not None: self.T -= 1 if self.T <= 0: done = True", "Convert 1-agent Gym environment into a multi-agent environment # NAgents = 1 def", "return np.array([space.sample() for _ in xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for _", "random, numpy as np class MultiEnv(object): pass class MultiGymEnv(MultiEnv): # # Convert 1-agent", "self.Env.step(action) if self.T is not None: self.T -= 1 if self.T <= 0:", "done, info def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for", "np.array([self.Env.action_space.sample() for _ in xrange(n)]) def reset(self, agents, random 
= False): assert len(agents)", "randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def randomAction(self): return self.randomActions(1)[0] def", "info = self.Env.step(action) if self.T is not None: self.T -= 1 if self.T", "self.Obs = None self.Reward = None self.Done = False self.Info = None self.RandomObservationSpace", "if random and self.RandomObservationSpace is not None: obs = self.RandomObservationSpace.sample() self.Env.state = obs", "_ in xrange(n)]) def randomActions(self, n): return np.array([self.Env.action_space.sample() for _ in xrange(n)]) def", "random = False): assert len(agents) == 1, \"Converted Gym environments can not handle", "multiple agents\" self.Agent = agents[0] obs = self.Env.reset() if random and self.RandomObservationSpace is", "self.RandomObservationSpace is not None: obs = self.RandomObservationSpace.sample() self.Env.state = obs self.Obs = obs", "def feedback(self): return False, [(self.Agent, self.Obs, self.Reward, self.Done, self.Info)] def __getattr__(self, name): return", "return [self.Obs] def addAgent(self, agent, random = False): raise NotImplementedError def step(self, actions):", "env self.RandomObservationSpace = random_observation_space self.T = self.TLimit = tlimit def reset(self, random=False): self.T", "= gym.make(env) self.observation_space = env.observation_space self.action_space = env.action_space self.TLimit = tlimit self.Env =", "1 self.Done = self.Done or (self.T <= 0) def feedback(self): return False, [(self.Agent,", "self.Done or (self.T <= 0) def feedback(self): return False, [(self.Agent, self.Obs, self.Reward, self.Done,", "reset(self, random=False): self.T = self.TLimit state = self.Env.reset() if random: state = self.randomStates(1)[0]", "% (self.Env,) def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample() for", "self.Done = False self.T = self.TLimit return [self.Obs] def addAgent(self, agent, 
random =", "agents\" self.Obs, self.Reward, self.Done, self.Info = self.Env.step(actions[0][1]) if self.T is not None: self.T", "return state def step(self, action): obs, reward, done, info = self.Env.step(action) if self.T", "False self.Info = None self.RandomObservationSpace = random_observation_space def __str__(self): return \"GymEnv(%s)\" % (self.Env,)", "reset(self, agents, random = False): assert len(agents) == 1, \"Converted Gym environments can", "self.Env = env self.RandomObservationSpace = random_observation_space self.T = self.TLimit = tlimit def reset(self,", "raise NotImplementedError def step(self, actions): assert len(actions) == 1, \"Converted Gym environments can", "if isinstance(env, str): env = gym.make(env) self.observation_space = env.observation_space self.action_space = env.action_space self.TLimit", "tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.observation_space = env.observation_space self.action_space =", "0) def feedback(self): return False, [(self.Agent, self.Obs, self.Reward, self.Done, self.Info)] def __getattr__(self, name):", "False, [(self.Agent, self.Obs, self.Reward, self.Done, self.Info)] def __getattr__(self, name): return getattr(self.Env, name) class", "<= 0) def feedback(self): return False, [(self.Agent, self.Obs, self.Reward, self.Done, self.Info)] def __getattr__(self,", "reward, done, info def randomStates(self, n): space = self.RandomObservationSpace or self.Env.observation_space return np.array([space.sample()", "action): obs, reward, done, info = self.Env.step(action) if self.T is not None: self.T", "(self.T <= 0) def feedback(self): return False, [(self.Agent, self.Obs, self.Reward, self.Done, self.Info)] def", "1 def __init__(self, env, tlimit=None, random_observation_space=None): if isinstance(env, str): env = gym.make(env) self.observation_space", "0: done = True return obs, reward, done, info def randomStates(self, n): space", "= None self.RandomObservationSpace = 
random_observation_space def __str__(self): return \"GymEnv(%s)\" % (self.Env,) def randomStates(self," ]
[ "{ \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": { }, } for j in range(len(array[i])-1):", "\"FeatureCollection\", \"features\": [] } features = [] for i in range(1, len(array)): feature", "= { \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": { }, } for j in", "array_to_geojson(array): props = [] for i in range(len(array[0])-1): props.append(array[0][i]) feature_collection = { \"type\":", "range(1, len(array)): feature = { \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": { }, }", "} features = [] for i in range(1, len(array)): feature = { \"type\":", "\"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": { }, } for j in range(len(array[i])-1): feature['properties'][props[j]]", "\"type\": \"FeatureCollection\", \"features\": [] } features = [] for i in range(1, len(array)):", "in range(1, len(array)): feature = { \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": { },", "{ }, } for j in range(len(array[i])-1): feature['properties'][props[j]] = array[i][j] feature_collection[\"features\"].append(feature) return json.dumps(feature_collection)", "i in range(1, len(array)): feature = { \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": {", "feature = { \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": { }, } for j", "\"geometry\": array[i][len(array[i])-1], \"properties\": { }, } for j in range(len(array[i])-1): feature['properties'][props[j]] = array[i][j]", "= [] for i in range(len(array[0])-1): props.append(array[0][i]) feature_collection = { \"type\": \"FeatureCollection\", \"features\":", "i in range(len(array[0])-1): props.append(array[0][i]) feature_collection = { \"type\": \"FeatureCollection\", \"features\": [] } features", "\"features\": [] } features = [] for i in range(1, len(array)): feature =", "props = [] for i in range(len(array[0])-1): 
props.append(array[0][i]) feature_collection = { \"type\": \"FeatureCollection\",", "= [] for i in range(1, len(array)): feature = { \"type\": \"Feature\", \"geometry\":", "import json def array_to_geojson(array): props = [] for i in range(len(array[0])-1): props.append(array[0][i]) feature_collection", "for i in range(1, len(array)): feature = { \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\":", "feature_collection = { \"type\": \"FeatureCollection\", \"features\": [] } features = [] for i", "= { \"type\": \"FeatureCollection\", \"features\": [] } features = [] for i in", "props.append(array[0][i]) feature_collection = { \"type\": \"FeatureCollection\", \"features\": [] } features = [] for", "in range(len(array[0])-1): props.append(array[0][i]) feature_collection = { \"type\": \"FeatureCollection\", \"features\": [] } features =", "features = [] for i in range(1, len(array)): feature = { \"type\": \"Feature\",", "json def array_to_geojson(array): props = [] for i in range(len(array[0])-1): props.append(array[0][i]) feature_collection =", "len(array)): feature = { \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": { }, } for", "[] } features = [] for i in range(1, len(array)): feature = {", "{ \"type\": \"FeatureCollection\", \"features\": [] } features = [] for i in range(1,", "range(len(array[0])-1): props.append(array[0][i]) feature_collection = { \"type\": \"FeatureCollection\", \"features\": [] } features = []", "for i in range(len(array[0])-1): props.append(array[0][i]) feature_collection = { \"type\": \"FeatureCollection\", \"features\": [] }", "\"properties\": { }, } for j in range(len(array[i])-1): feature['properties'][props[j]] = array[i][j] feature_collection[\"features\"].append(feature) return", "[] for i in range(1, len(array)): feature = { \"type\": \"Feature\", \"geometry\": array[i][len(array[i])-1],", "\"Feature\", \"geometry\": array[i][len(array[i])-1], \"properties\": { }, } for j in 
range(len(array[i])-1): feature['properties'][props[j]] =", "[] for i in range(len(array[0])-1): props.append(array[0][i]) feature_collection = { \"type\": \"FeatureCollection\", \"features\": []", "array[i][len(array[i])-1], \"properties\": { }, } for j in range(len(array[i])-1): feature['properties'][props[j]] = array[i][j] feature_collection[\"features\"].append(feature)", "<reponame>erictheise/trctr-pllr import json def array_to_geojson(array): props = [] for i in range(len(array[0])-1): props.append(array[0][i])", "def array_to_geojson(array): props = [] for i in range(len(array[0])-1): props.append(array[0][i]) feature_collection = {" ]
[ "setuptools import setup, find_packages from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory,", "'Intended Audience :: Science/Research', 'Operating System :: POSIX :: Linux', 'Development Status ::", "Python :: 3', 'Topic :: Scientific/Engineering', 'Intended Audience :: Science/Research', 'Operating System ::", "Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI Approved", "metadata to display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\",", "from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:", "as f: readme_as_long_description = f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], # metadata", "f: readme_as_long_description = f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], # metadata to", ":: Science/Research', 'Operating System :: POSIX :: Linux', 'Development Status :: 3 -", "open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: readme_as_long_description = f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), #", "long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI Approved :: BSD", "PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", 
keywords=\"ecosystem modelling framework inverse-modelling\",", "<filename>setup.py from setuptools import setup, find_packages from os import path this_directory = path.abspath(path.dirname(__file__))", "os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: readme_as_long_description", "framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI Approved :: BSD License', 'Programming Language", "path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: readme_as_long_description = f.read() setup( name=\"nemf\", version=\"0.3.4\",", "modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI Approved :: BSD License', 'Programming", "Linux', 'Development Status :: 3 - Alpha', ], license='BSD', install_requires=[ 'numpy','seaborn','pandas','matplotlib','networkx','pyyaml', 'termcolor'] )", "display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling", "Scientific/Engineering', 'Intended Audience :: Science/Research', 'Operating System :: POSIX :: Linux', 'Development Status", "3', 'Topic :: Scientific/Engineering', 'Intended Audience :: Science/Research', 'Operating System :: POSIX ::", ":: OSI Approved :: BSD License', 'Programming Language :: Python :: 3', 'Topic", "System :: POSIX :: Linux', 'Development Status :: 3 - Alpha', ], license='BSD',", "import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: readme_as_long_description =", "description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description, 
long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License", "POSIX :: Linux', 'Development Status :: 3 - Alpha', ], license='BSD', install_requires=[ 'numpy','seaborn','pandas','matplotlib','networkx','pyyaml',", ":: 3', 'Topic :: Scientific/Engineering', 'Intended Audience :: Science/Research', 'Operating System :: POSIX", "Science/Research', 'Operating System :: POSIX :: Linux', 'Development Status :: 3 - Alpha',", "from setuptools import setup, find_packages from os import path this_directory = path.abspath(path.dirname(__file__)) with", "= path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: readme_as_long_description = f.read() setup( name=\"nemf\",", ":: BSD License', 'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering', 'Intended", "version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], # metadata to display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based", "f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], # metadata to display on PyPI", "License', 'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering', 'Intended Audience ::", "'Topic :: Scientific/Engineering', 'Intended Audience :: Science/Research', 'Operating System :: POSIX :: Linux',", "on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework", "Approved :: BSD License', 'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering',", "readme_as_long_description = f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], # metadata to display", 
"name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], # metadata to display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\",", "inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI Approved :: BSD License', 'Programming Language ::", "setup, find_packages from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8')", "'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering', 'Intended Audience :: Science/Research',", ":: Scientific/Engineering', 'Intended Audience :: Science/Research', 'Operating System :: POSIX :: Linux', 'Development", ":: POSIX :: Linux', 'Development Status :: 3 - Alpha', ], license='BSD', install_requires=[", "# install_requires=[\"\"], # metadata to display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling", "'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3',", "keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI Approved :: BSD License',", "install_requires=[\"\"], # metadata to display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\",", "url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI Approved :: BSD License', 'Programming Language :: Python", "classifiers=[ 'License :: OSI Approved :: BSD License', 'Programming Language :: Python ::", "Language :: Python :: 3', 'Topic :: Scientific/Engineering', 'Intended Audience :: Science/Research', 'Operating", "Audience :: Science/Research', 'Operating System :: POSIX :: Linux', 'Development Status :: 3", "# metadata to display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description,", 
"this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: readme_as_long_description = f.read() setup(", "encoding='utf-8') as f: readme_as_long_description = f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], #", "BSD License', 'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering', 'Intended Audience", "OSI Approved :: BSD License', 'Programming Language :: Python :: 3', 'Topic ::", "to display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem", "packages=find_packages(), # install_requires=[\"\"], # metadata to display on PyPI author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem", "author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[", "author=\"<NAME>\", author_email=\"<EMAIL>\", description=\"Network-based ecosystem Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\",", "setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], # metadata to display on PyPI author=\"<NAME>\",", ":: Linux', 'Development Status :: 3 - Alpha', ], license='BSD', install_requires=[ 'numpy','seaborn','pandas','matplotlib','networkx','pyyaml', 'termcolor']", "path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: 
readme_as_long_description = f.read()", "import setup, find_packages from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'),", "with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: readme_as_long_description = f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(),", "find_packages from os import path this_directory = path.abspath(path.dirname(__file__)) with open(path.join(this_directory, 'README.md'), encoding='utf-8') as", "'README.md'), encoding='utf-8') as f: readme_as_long_description = f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"],", "ecosystem Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License ::", ":: Python :: 3', 'Topic :: Scientific/Engineering', 'Intended Audience :: Science/Research', 'Operating System", "= f.read() setup( name=\"nemf\", version=\"0.3.4\", packages=find_packages(), # install_requires=[\"\"], # metadata to display on", "'Operating System :: POSIX :: Linux', 'Development Status :: 3 - Alpha', ],", "Modelling Framework\", long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI", "long_description=readme_as_long_description, long_description_content_type=\"text/markdown\", keywords=\"ecosystem modelling framework inverse-modelling\", url=\"https://github.com/465b/nemf/\", classifiers=[ 'License :: OSI Approved ::" ]
[ "class Garage(object): def __init__(self, name: str, key: str, cars: List[Train], volume: int): self.name", "List from .train import Train class Garage(object): def __init__(self, name: str, key: str,", "List[Train], volume: int): self.name = name self.key = key self.cars = cars self.volume", ".train import Train class Garage(object): def __init__(self, name: str, key: str, cars: List[Train],", "Garage(object): def __init__(self, name: str, key: str, cars: List[Train], volume: int): self.name =", "def __init__(self, name: str, key: str, cars: List[Train], volume: int): self.name = name", "import List from .train import Train class Garage(object): def __init__(self, name: str, key:", "from .train import Train class Garage(object): def __init__(self, name: str, key: str, cars:", "<reponame>lucassm02/fiap-cptm from typing import List from .train import Train class Garage(object): def __init__(self,", "str, cars: List[Train], volume: int): self.name = name self.key = key self.cars =", "name: str, key: str, cars: List[Train], volume: int): self.name = name self.key =", "int): self.name = name self.key = key self.cars = cars self.volume = volume", "volume: int): self.name = name self.key = key self.cars = cars self.volume =", "key: str, cars: List[Train], volume: int): self.name = name self.key = key self.cars", "Train class Garage(object): def __init__(self, name: str, key: str, cars: List[Train], volume: int):", "__init__(self, name: str, key: str, cars: List[Train], volume: int): self.name = name self.key", "import Train class Garage(object): def __init__(self, name: str, key: str, cars: List[Train], volume:", "typing import List from .train import Train class Garage(object): def __init__(self, name: str,", "cars: List[Train], volume: int): self.name = name self.key = key self.cars = cars", "from typing import List from .train import Train class Garage(object): def __init__(self, name:", "str, key: str, cars: List[Train], volume: int): self.name = name self.key = 
key" ]
[ "size = int(input()) matrix = [[int(x) for x in input().split()] for _ in", "int(input()) matrix = [[int(x) for x in input().split()] for _ in range(size)] print(sum([matrix[x][x]", "matrix = [[int(x) for x in input().split()] for _ in range(size)] print(sum([matrix[x][x] for", "= int(input()) matrix = [[int(x) for x in input().split()] for _ in range(size)]", "[[int(x) for x in input().split()] for _ in range(size)] print(sum([matrix[x][x] for x in", "= [[int(x) for x in input().split()] for _ in range(size)] print(sum([matrix[x][x] for x", "for x in input().split()] for _ in range(size)] print(sum([matrix[x][x] for x in range(size)]))" ]
[ "+ \" \" + first_name certificate_name = full_name.upper() print(certificate_name) # Code starts here", "first_name = topper.split()[0] full_name = last_name + \" \" + first_name certificate_name =", "-------------- # Code starts here courses = {'Math':65, 'English': 70, 'History': 80, 'French':", "65+70+80+70+60 percentage = total * 100 / 500 print(total) print(percentage) # Code ends", "mathematics.get) print(topper) # Code ends here # -------------- # Given string topper =", "print(topper) # Code ends here # -------------- # Given string topper = '<NAME>'", "70, 'History': 80, 'French': 70, 'Science':60} total = 65+70+80+70+60 percentage = total *", "'<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75} topper = max(mathematics,key = mathematics.get)", "['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class = class_1 + class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) #", "here class_1 = ['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class = class_1 + class_2 new_class.append('<NAME>')", "-------------- # Code starts here mathematics = {'<NAME> ': 78, '<NAME>': 95, '<NAME>':65,", "new_class.remove('<NAME>') print(new_class) # Code ends here # -------------- # Code starts here courses", "= {'Math':65, 'English': 70, 'History': 80, 'French': 70, 'Science':60} total = 65+70+80+70+60 percentage", "# Code ends here # -------------- # Code starts here courses = {'Math':65,", "\" \" + first_name certificate_name = full_name.upper() print(certificate_name) # Code starts here #", "percentage = total * 100 / 500 print(total) print(percentage) # Code ends here", "Code ends here # -------------- # Code starts here courses = {'Math':65, 'English':", "500 print(total) print(percentage) # Code ends here # -------------- # Code starts here", "topper.split()[1] first_name = topper.split()[0] full_name = last_name + \" \" + first_name certificate_name", "+ 
first_name certificate_name = full_name.upper() print(certificate_name) # Code starts here # Code ends", "{'<NAME> ': 78, '<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75} topper =", "'<NAME>': 66, '<NAME>':75} topper = max(mathematics,key = mathematics.get) print(topper) # Code ends here", "full_name = last_name + \" \" + first_name certificate_name = full_name.upper() print(certificate_name) #", "/ 500 print(total) print(percentage) # Code ends here # -------------- # Code starts", "total * 100 / 500 print(total) print(percentage) # Code ends here # --------------", "# Code starts here class_1 = ['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class = class_1", "topper = max(mathematics,key = mathematics.get) print(topper) # Code ends here # -------------- #", "\" + first_name certificate_name = full_name.upper() print(certificate_name) # Code starts here # Code", "* 100 / 500 print(total) print(percentage) # Code ends here # -------------- #", "'<NAME>' last_name = topper.split()[1] first_name = topper.split()[0] full_name = last_name + \" \"", "class_1 + class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) # Code ends here # -------------- #", "= ['<NAME>','<NAME>','<NAME>'] new_class = class_1 + class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) # Code ends", "# -------------- # Code starts here mathematics = {'<NAME> ': 78, '<NAME>': 95,", "= topper.split()[1] first_name = topper.split()[0] full_name = last_name + \" \" + first_name", "here # -------------- # Code starts here mathematics = {'<NAME> ': 78, '<NAME>':", "-------------- # Given string topper = '<NAME>' last_name = topper.split()[1] first_name = topper.split()[0]", "95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75} topper = max(mathematics,key = mathematics.get) print(topper)", "here courses = {'Math':65, 'English': 70, 'History': 80, 'French': 70, 'Science':60} 
total =", "starts here mathematics = {'<NAME> ': 78, '<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>':", "ends here # -------------- # Given string topper = '<NAME>' last_name = topper.split()[1]", "': 78, '<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75} topper = max(mathematics,key", "= topper.split()[0] full_name = last_name + \" \" + first_name certificate_name = full_name.upper()", "# -------------- # Code starts here class_1 = ['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class", "'Science':60} total = 65+70+80+70+60 percentage = total * 100 / 500 print(total) print(percentage)", "class_1 = ['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class = class_1 + class_2 new_class.append('<NAME>') new_class.remove('<NAME>')", "topper = '<NAME>' last_name = topper.split()[1] first_name = topper.split()[0] full_name = last_name +", "'<NAME>':70, '<NAME>': 66, '<NAME>':75} topper = max(mathematics,key = mathematics.get) print(topper) # Code ends", "# Code starts here courses = {'Math':65, 'English': 70, 'History': 80, 'French': 70,", "'History': 80, 'French': 70, 'Science':60} total = 65+70+80+70+60 percentage = total * 100", "new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) # Code ends here # -------------- # Code starts here", "Code starts here mathematics = {'<NAME> ': 78, '<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70,", "= {'<NAME> ': 78, '<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75} topper", "66, '<NAME>':75} topper = max(mathematics,key = mathematics.get) print(topper) # Code ends here #", "last_name + \" \" + first_name certificate_name = full_name.upper() print(certificate_name) # Code starts", "here # -------------- # Code starts here courses = {'Math':65, 'English': 70, 'History':", "['<NAME>','<NAME>','<NAME>'] new_class = class_1 + class_2 new_class.append('<NAME>') 
new_class.remove('<NAME>') print(new_class) # Code ends here", "courses = {'Math':65, 'English': 70, 'History': 80, 'French': 70, 'Science':60} total = 65+70+80+70+60", "70, 'Science':60} total = 65+70+80+70+60 percentage = total * 100 / 500 print(total)", "'<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75} topper = max(mathematics,key = mathematics.get) print(topper) #", "first_name certificate_name = full_name.upper() print(certificate_name) # Code starts here # Code ends here", "Code ends here # -------------- # Given string topper = '<NAME>' last_name =", "# Code ends here # -------------- # Code starts here mathematics = {'<NAME>", "= total * 100 / 500 print(total) print(percentage) # Code ends here #", "# Given string topper = '<NAME>' last_name = topper.split()[1] first_name = topper.split()[0] full_name", "starts here courses = {'Math':65, 'English': 70, 'History': 80, 'French': 70, 'Science':60} total", "ends here # -------------- # Code starts here mathematics = {'<NAME> ': 78,", "new_class = class_1 + class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) # Code ends here #", "'English': 70, 'History': 80, 'French': 70, 'Science':60} total = 65+70+80+70+60 percentage = total", "print(new_class) # Code ends here # -------------- # Code starts here courses =", "= class_1 + class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) # Code ends here # --------------", "= last_name + \" \" + first_name certificate_name = full_name.upper() print(certificate_name) # Code", "# Code starts here mathematics = {'<NAME> ': 78, '<NAME>': 95, '<NAME>':65, '<NAME>':50,", "max(mathematics,key = mathematics.get) print(topper) # Code ends here # -------------- # Given string", "'<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75} topper = max(mathematics,key = mathematics.get) print(topper) # Code", "'French': 70, 'Science':60} total = 65+70+80+70+60 percentage = total * 100 / 500", "starts here class_1 = 
['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class = class_1 + class_2", "-------------- # Code starts here class_1 = ['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class =", "100 / 500 print(total) print(percentage) # Code ends here # -------------- # Code", "Code starts here courses = {'Math':65, 'English': 70, 'History': 80, 'French': 70, 'Science':60}", "80, 'French': 70, 'Science':60} total = 65+70+80+70+60 percentage = total * 100 /", "# -------------- # Code starts here courses = {'Math':65, 'English': 70, 'History': 80,", "# -------------- # Given string topper = '<NAME>' last_name = topper.split()[1] first_name =", "class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) # Code ends here # -------------- # Code starts", "Code starts here class_1 = ['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class = class_1 +", "mathematics = {'<NAME> ': 78, '<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75}", "here # -------------- # Given string topper = '<NAME>' last_name = topper.split()[1] first_name", "# Code ends here # -------------- # Given string topper = '<NAME>' last_name", "= ['<NAME>','<NAME>','<NAME>','<NAME>'] class_2 = ['<NAME>','<NAME>','<NAME>'] new_class = class_1 + class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class)", "= 65+70+80+70+60 percentage = total * 100 / 500 print(total) print(percentage) # Code", "= mathematics.get) print(topper) # Code ends here # -------------- # Given string topper", "78, '<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66, '<NAME>':75} topper = max(mathematics,key =", "print(percentage) # Code ends here # -------------- # Code starts here mathematics =", "{'Math':65, 'English': 70, 'History': 80, 'French': 70, 'Science':60} total = 65+70+80+70+60 percentage =", "total = 65+70+80+70+60 percentage = total * 100 / 500 print(total) 
print(percentage) #", "last_name = topper.split()[1] first_name = topper.split()[0] full_name = last_name + \" \" +", "= max(mathematics,key = mathematics.get) print(topper) # Code ends here # -------------- # Given", "class_2 = ['<NAME>','<NAME>','<NAME>'] new_class = class_1 + class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) # Code", "print(total) print(percentage) # Code ends here # -------------- # Code starts here mathematics", "here mathematics = {'<NAME> ': 78, '<NAME>': 95, '<NAME>':65, '<NAME>':50, '<NAME>':70, '<NAME>': 66,", "'<NAME>':75} topper = max(mathematics,key = mathematics.get) print(topper) # Code ends here # --------------", "string topper = '<NAME>' last_name = topper.split()[1] first_name = topper.split()[0] full_name = last_name", "Given string topper = '<NAME>' last_name = topper.split()[1] first_name = topper.split()[0] full_name =", "topper.split()[0] full_name = last_name + \" \" + first_name certificate_name = full_name.upper() print(certificate_name)", "= '<NAME>' last_name = topper.split()[1] first_name = topper.split()[0] full_name = last_name + \"", "+ class_2 new_class.append('<NAME>') new_class.remove('<NAME>') print(new_class) # Code ends here # -------------- # Code", "ends here # -------------- # Code starts here courses = {'Math':65, 'English': 70,", "Code ends here # -------------- # Code starts here mathematics = {'<NAME> ':" ]
[ "json parameters = pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df = df.join(parameters) # re", "cur.execute( \"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER BY created DESC", "'html' else str(table)) + footer) def _can_update(self, uuid): \"\"\"Check if an experiment with", "if fmt not in {'html', 'plain'}: raise ValueError('fmt must be one \"html\" or", "uuid = uuid4().hex cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid) VALUES(?)", "pd.read_sql(query, self.conn, params=[n], index_col='uuid') if normalize: # parse and normalize json parameters =", "self.conn.commit() def comment(self, uuid, comment): \"\"\"Add a comment to an experiment given its", "\"\"\"Add a comment to an experiment given its uuid \"\"\" # TODO: add", "comment TEXT ) \"\"\") cur.close() def __getitem__(self, uuid): \"\"\"Get experiment with a given", "experiments ORDER BY created DESC LIMIT ? \"\"\", [n]) res = cur.fetchall() table", "Table class SQLiteTracker: \"\"\"A simple experiment tracker using SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to", "INSERT INTO experiments (uuid, parameters) VALUES(?, ?) \"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def", "self.conn, params=[n], index_col='uuid') if normalize: # parse and normalize json parameters = pd.json_normalize(", "list of uuids return pd.read_sql('SELECT * FROM experiments WHERE uuid = ?', self.conn,", "can be updated \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" SELECT parameters FROM experiments", "... 
\"SELECT uuid, json_extract(parameters, '$.a') FROM experiments\") \"\"\" df = pd.read_sql(code, self.conn) if", "json import pandas as pd from sklearn_evaluation.table import Table class SQLiteTracker: \"\"\"A simple", "== 'html' else '\\n') + '(Most recent experiments)') else: footer = '' return", "it work for a list of uuids return pd.read_sql('SELECT * FROM experiments WHERE", "FROM experiments\") \"\"\" df = pd.read_sql(code, self.conn) if 'uuid' in df: df =", "experiment, returns a uuid \"\"\" uuid = uuid4().hex cur = self.conn.cursor() cur.execute( \"\"\"", "'$.a') FROM experiments\") \"\"\" df = pd.read_sql(code, self.conn) if 'uuid' in df: df", "= ?', self.conn, params=[uuid], index_col='uuid') def recent(self, n=5, normalize=False): \"\"\"Get most recent experiments", "\"{}\" because it does ' 'not exist'.format(uuid)) def __repr__(self): return self._recent(fmt='plain') def _repr_html_(self):", "IF NOT EXISTS experiments ( uuid TEXT NOT NULL UNIQUE, created TIMESTAMP default", "SELECT parameters FROM experiments WHERE uuid = ? \"\"\", [uuid]) row = cur.fetchone()", "json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self, uuid, comment): \"\"\"Add a comment to an experiment", "BY created DESC LIMIT ? \"\"\" df = pd.read_sql(query, self.conn, params=[n], index_col='uuid') if", "cur.execute( \"\"\" INSERT INTO experiments (uuid) VALUES(?) \"\"\", [uuid]) cur.close() self.conn.commit() return uuid", "\"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit() def insert(self, uuid, parameters): \"\"\"Insert a new experiment", "= self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET comment = ? 
WHERE uuid =", "from uuid import uuid4 import sqlite3 import json import pandas as pd from", "import uuid4 import sqlite3 import json import pandas as pd from sklearn_evaluation.table import", "self.conn.commit() return uuid def update(self, uuid, parameters): \"\"\"Update the parameters of an empty", "# re order columns to show \"comment\" at the end comment = df.pop('comment')", "Parameters ---------- path Database location \"\"\" def __init__(self, path: str): self.conn = sqlite3.connect(path)", "exists = row is not None if exists: empty = row[0] is None", "returns a uuid \"\"\" uuid = uuid4().hex cur = self.conn.cursor() cur.execute( \"\"\" INSERT", "UPDATE experiments SET parameters = ? WHERE uuid = ? \"\"\", [json.dumps(parameters), uuid])", "ValueError('fmt must be one \"html\" or \"plain\"') cur = self.conn.cursor() cur.execute( \"\"\" SELECT", "pandas as pd from sklearn_evaluation.table import Table class SQLiteTracker: \"\"\"A simple experiment tracker", "str(table)) + footer) def _can_update(self, uuid): \"\"\"Check if an experiment with a given", "== 'plain': title += '\\n' if len(table): footer = (('<br>' if fmt ==", "FROM experiments ORDER BY created DESC LIMIT ? \"\"\", [n]) res = cur.fetchall()", "json.loads(s))).set_index( df.index) df = df.join(parameters) # re order columns to show \"comment\" at", "{} </h4>' if fmt == 'html' else '{}\\n' title = title_template.format(type(self).__name__) if not", "= pd.read_sql(code, self.conn) if 'uuid' in df: df = df.set_index('uuid') return df def", "if not empty: raise ValueError('Cannot update non-empty experiment with ' 'uuid \"{}\"'.format(uuid)) else:", "\"\"\"Insert a new experiment \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments", "parameters, comment FROM experiments ORDER BY created DESC LIMIT ? 
\"\"\", [n]) res", "if not len(table): title += '(No experiments saved yet)' if fmt == 'plain':", "\"\"\" uuid = uuid4().hex cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid)", "new experiment, returns a uuid \"\"\" uuid = uuid4().hex cur = self.conn.cursor() cur.execute(", "pd.read_sql(code, self.conn) if 'uuid' in df: df = df.set_index('uuid') return df def new(self):", "not in {'html', 'plain'}: raise ValueError('fmt must be one \"html\" or \"plain\"') cur", "= tracker.query( ... \"SELECT uuid, json_extract(parameters, '$.a') FROM experiments\") \"\"\" df = pd.read_sql(code,", "= ? WHERE uuid = ? \"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit() def insert(self,", "return (title + (table.to_html() if fmt == 'html' else str(table)) + footer) def", "normalize json parameters = pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df = df.join(parameters) #", "else: raise ValueError('Cannot update experiment with ' 'uuid \"{}\" because it does '", "= row is not None if exists: empty = row[0] is None if", "parameters of an empty experiment given its uuid \"\"\" self._can_update(uuid) cur = self.conn.cursor()", "\"\"\", [n]) res = cur.fetchall() table = Table(res, header=['uuid', 'created', 'parameters', 'comment']) title_template", "given its uuid \"\"\" # TODO: add overwrite (false by default) and append", "\"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self, uuid, comment): \"\"\"Add a comment to", "comment = ? WHERE uuid = ? \"\"\", [comment, uuid]) cur.close() self.conn.commit() def", "here <../user_guide/SQLiteTracker>` to see the user guide. 
Parameters ---------- path Database location \"\"\"", "uuid, parameters): \"\"\"Insert a new experiment \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" INSERT", "df = pd.read_sql(code, self.conn) if 'uuid' in df: df = df.set_index('uuid') return df", "experiments as a pandas.DataFrame \"\"\" query = \"\"\" SELECT uuid, created, parameters, comment", "[uuid]) row = cur.fetchone() exists = row is not None if exists: empty", "'\\n' if len(table): footer = (('<br>' if fmt == 'html' else '\\n') +", "? WHERE uuid = ? \"\"\", [comment, uuid]) cur.close() self.conn.commit() def _recent(self, n=5,", ">>> tracker.insert('my_uuid', {'a': 1}) >>> df = tracker.query( ... \"SELECT uuid, json_extract(parameters, '$.a')", "s: json.loads(s))).set_index( df.index) df = df.join(parameters) # re order columns to show \"comment\"", "WHERE uuid = ?', self.conn, params=[uuid], index_col='uuid') def recent(self, n=5, normalize=False): \"\"\"Get most", "(uuid, parameters) VALUES(?, ?) \"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self, uuid, comment):", "cur.execute( \"\"\" SELECT parameters FROM experiments WHERE uuid = ? \"\"\", [uuid]) row", "= df.pop('comment') df.insert(len(df.columns), 'comment', comment) return df def query(self, code): \"\"\"Query the database,", "df: df = df.set_index('uuid') return df def new(self): \"\"\"Create a new experiment, returns", "= self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid) VALUES(?) \"\"\", [uuid]) cur.close() self.conn.commit()", "created, parameters, comment FROM experiments ORDER BY created DESC LIMIT ? 
\"\"\", [n])", "update experiment with ' 'uuid \"{}\" because it does ' 'not exist'.format(uuid)) def", "new experiment \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid, parameters)", "title_template.format(type(self).__name__) if not len(table): title += '(No experiments saved yet)' if fmt ==", "\"\"\"Get most recent experiments as a pandas.DataFrame \"\"\" query = \"\"\" SELECT uuid,", "+= '(No experiments saved yet)' if fmt == 'plain': title += '\\n' if", "cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid) VALUES(?) \"\"\", [uuid]) cur.close()", "self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET parameters = ? WHERE uuid = ?", "SET comment = ? WHERE uuid = ? \"\"\", [comment, uuid]) cur.close() self.conn.commit()", "else str(table)) + footer) def _can_update(self, uuid): \"\"\"Check if an experiment with a", "'(Most recent experiments)') else: footer = '' return (title + (table.to_html() if fmt", "parameters): \"\"\"Insert a new experiment \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO", "---------- path Database location \"\"\" def __init__(self, path: str): self.conn = sqlite3.connect(path) cur", "uuid = ? \"\"\", [uuid]) row = cur.fetchone() exists = row is not", "tracker using SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to see the user guide. Parameters ----------", "and normalize json parameters = pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df = df.join(parameters)", "uuid = ?', self.conn, params=[uuid], index_col='uuid') def recent(self, n=5, normalize=False): \"\"\"Get most recent", "SQLiteTracker >>> tracker = SQLiteTracker(':memory:') # example in-memory db >>> tracker.insert('my_uuid', {'a': 1})", "UNIQUE, created TIMESTAMP default current_timestamp, parameters TEXT, comment TEXT ) \"\"\") cur.close() def", "created DESC LIMIT ? 
\"\"\", [n]) res = cur.fetchall() table = Table(res, header=['uuid',", "a given uuid can be updated \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" SELECT", "\"\"\" def __init__(self, path: str): self.conn = sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\" CREATE", "as pd from sklearn_evaluation.table import Table class SQLiteTracker: \"\"\"A simple experiment tracker using", "VALUES(?, ?) \"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self, uuid, comment): \"\"\"Add a", "an experiment with a given uuid can be updated \"\"\" cur = self.conn.cursor()", "\"\"\" UPDATE experiments SET comment = ? WHERE uuid = ? \"\"\", [comment,", "BY created DESC LIMIT ? \"\"\", [n]) res = cur.fetchall() table = Table(res,", "else '\\n') + '(Most recent experiments)') else: footer = '' return (title +", "uuid): \"\"\"Get experiment with a given uuid \"\"\" # TODO: make it work", "raise ValueError('Cannot update non-empty experiment with ' 'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot update", "columns to show \"comment\" at the end comment = df.pop('comment') df.insert(len(df.columns), 'comment', comment)", "an empty experiment given its uuid \"\"\" self._can_update(uuid) cur = self.conn.cursor() cur.execute( \"\"\"", "cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET comment = ? WHERE uuid", ">>> df = tracker.query( ... \"SELECT uuid, json_extract(parameters, '$.a') FROM experiments\") \"\"\" df", "experiments (uuid) VALUES(?) 
\"\"\", [uuid]) cur.close() self.conn.commit() return uuid def update(self, uuid, parameters):", "Table(res, header=['uuid', 'created', 'parameters', 'comment']) title_template = '<h4> {} </h4>' if fmt ==", "if fmt == 'html' else str(table)) + footer) def _can_update(self, uuid): \"\"\"Check if", "== 'html' else '{}\\n' title = title_template.format(type(self).__name__) if not len(table): title += '(No", "sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\" CREATE TABLE IF NOT EXISTS experiments ( uuid", "\"\"\" self._can_update(uuid) cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET parameters = ?", "'{}\\n' title = title_template.format(type(self).__name__) if not len(table): title += '(No experiments saved yet)'", "overwrite (false by default) and append options cur = self.conn.cursor() cur.execute( \"\"\" UPDATE", "TABLE IF NOT EXISTS experiments ( uuid TEXT NOT NULL UNIQUE, created TIMESTAMP", "LIMIT ? \"\"\", [n]) res = cur.fetchall() table = Table(res, header=['uuid', 'created', 'parameters',", "if an experiment with a given uuid can be updated \"\"\" cur =", "SQLiteTracker(':memory:') # example in-memory db >>> tracker.insert('my_uuid', {'a': 1}) >>> df = tracker.query(", "because it does ' 'not exist'.format(uuid)) def __repr__(self): return self._recent(fmt='plain') def _repr_html_(self): return", "if normalize: # parse and normalize json parameters = pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index(", "index_col='uuid') if normalize: # parse and normalize json parameters = pd.json_normalize( df.pop('parameters').apply(lambda s:", "sklearn_evaluation import SQLiteTracker >>> tracker = SQLiteTracker(':memory:') # example in-memory db >>> tracker.insert('my_uuid',", "default current_timestamp, parameters TEXT, comment TEXT ) \"\"\") cur.close() def __getitem__(self, uuid): \"\"\"Get", "import json import pandas as pd from sklearn_evaluation.table import Table class SQLiteTracker: \"\"\"A", "one 
\"html\" or \"plain\"') cur = self.conn.cursor() cur.execute( \"\"\" SELECT uuid, created, parameters,", "parameters) VALUES(?, ?) \"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self, uuid, comment): \"\"\"Add", "TEXT, comment TEXT ) \"\"\") cur.close() def __getitem__(self, uuid): \"\"\"Get experiment with a", "experiments (uuid, parameters) VALUES(?, ?) \"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self, uuid,", "self.conn = sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\" CREATE TABLE IF NOT EXISTS experiments", "db >>> tracker.insert('my_uuid', {'a': 1}) >>> df = tracker.query( ... \"SELECT uuid, json_extract(parameters,", "update non-empty experiment with ' 'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot update experiment with", "returns a pandas.DataFrame Examples -------- >>> from sklearn_evaluation import SQLiteTracker >>> tracker =", "comment = df.pop('comment') df.insert(len(df.columns), 'comment', comment) return df def query(self, code): \"\"\"Query the", "\"\"\" UPDATE experiments SET parameters = ? WHERE uuid = ? 
\"\"\", [json.dumps(parameters),", "'html' else '{}\\n' title = title_template.format(type(self).__name__) if not len(table): title += '(No experiments", "a new experiment, returns a uuid \"\"\" uuid = uuid4().hex cur = self.conn.cursor()", "the end comment = df.pop('comment') df.insert(len(df.columns), 'comment', comment) return df def query(self, code):", "\"\"\", [uuid]) cur.close() self.conn.commit() return uuid def update(self, uuid, parameters): \"\"\"Update the parameters", "= '' return (title + (table.to_html() if fmt == 'html' else str(table)) +", "recent experiments as a pandas.DataFrame \"\"\" query = \"\"\" SELECT uuid, created, parameters,", "from sklearn_evaluation import SQLiteTracker >>> tracker = SQLiteTracker(':memory:') # example in-memory db >>>", "\"\"\" df = pd.read_sql(code, self.conn) if 'uuid' in df: df = df.set_index('uuid') return", "# example in-memory db >>> tracker.insert('my_uuid', {'a': 1}) >>> df = tracker.query( ...", "uuid, comment): \"\"\"Add a comment to an experiment given its uuid \"\"\" #", "\"{}\"'.format(uuid)) else: raise ValueError('Cannot update experiment with ' 'uuid \"{}\" because it does", "'not exist'.format(uuid)) def __repr__(self): return self._recent(fmt='plain') def _repr_html_(self): return self._recent(fmt='html') def __del__(self): self.conn.close()", "'uuid \"{}\" because it does ' 'not exist'.format(uuid)) def __repr__(self): return self._recent(fmt='plain') def", "\"\"\"Get experiment with a given uuid \"\"\" # TODO: make it work for", "df def new(self): \"\"\"Create a new experiment, returns a uuid \"\"\" uuid =", "cur = self.conn.cursor() cur.execute(\"\"\" CREATE TABLE IF NOT EXISTS experiments ( uuid TEXT", "\"\"\" SELECT parameters FROM experiments WHERE uuid = ? \"\"\", [uuid]) row =", "? \"\"\", [uuid]) row = cur.fetchone() exists = row is not None if", "using SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to see the user guide. 
Parameters ---------- path", "uuid def update(self, uuid, parameters): \"\"\"Update the parameters of an empty experiment given", "\"\"\", [uuid]) row = cur.fetchone() exists = row is not None if exists:", "NOT NULL UNIQUE, created TIMESTAMP default current_timestamp, parameters TEXT, comment TEXT ) \"\"\")", "( uuid TEXT NOT NULL UNIQUE, created TIMESTAMP default current_timestamp, parameters TEXT, comment", "'<h4> {} </h4>' if fmt == 'html' else '{}\\n' title = title_template.format(type(self).__name__) if", "= \"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER BY created DESC", "experiments WHERE uuid = ?', self.conn, params=[uuid], index_col='uuid') def recent(self, n=5, normalize=False): \"\"\"Get", "\"\"\" INSERT INTO experiments (uuid, parameters) VALUES(?, ?) \"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit()", "NOT EXISTS experiments ( uuid TEXT NOT NULL UNIQUE, created TIMESTAMP default current_timestamp,", "(table.to_html() if fmt == 'html' else str(table)) + footer) def _can_update(self, uuid): \"\"\"Check", "TODO: add overwrite (false by default) and append options cur = self.conn.cursor() cur.execute(", "uuids return pd.read_sql('SELECT * FROM experiments WHERE uuid = ?', self.conn, params=[uuid], index_col='uuid')", "' 'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot update experiment with ' 'uuid \"{}\" because", "TEXT NOT NULL UNIQUE, created TIMESTAMP default current_timestamp, parameters TEXT, comment TEXT )", "+ '(Most recent experiments)') else: footer = '' return (title + (table.to_html() if", "cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET parameters = ? WHERE uuid", "n=5, normalize=False): \"\"\"Get most recent experiments as a pandas.DataFrame \"\"\" query = \"\"\"", "CREATE TABLE IF NOT EXISTS experiments ( uuid TEXT NOT NULL UNIQUE, created", "df = pd.read_sql(query, self.conn, params=[n], index_col='uuid') if normalize: # parse and normalize json", "WHERE uuid = ? 
\"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit() def insert(self, uuid, parameters):", "to an experiment given its uuid \"\"\" # TODO: add overwrite (false by", "or \"plain\"') cur = self.conn.cursor() cur.execute( \"\"\" SELECT uuid, created, parameters, comment FROM", "if exists: empty = row[0] is None if not empty: raise ValueError('Cannot update", "ValueError('Cannot update non-empty experiment with ' 'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot update experiment", "experiment tracker using SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to see the user guide. Parameters", "? \"\"\" df = pd.read_sql(query, self.conn, params=[n], index_col='uuid') if normalize: # parse and", "add overwrite (false by default) and append options cur = self.conn.cursor() cur.execute( \"\"\"", "[uuid]) cur.close() self.conn.commit() return uuid def update(self, uuid, parameters): \"\"\"Update the parameters of", "WHERE uuid = ? \"\"\", [uuid]) row = cur.fetchone() exists = row is", "uuid = ? \"\"\", [comment, uuid]) cur.close() self.conn.commit() def _recent(self, n=5, fmt='html'): if", "'parameters', 'comment']) title_template = '<h4> {} </h4>' if fmt == 'html' else '{}\\n'", "= self.conn.cursor() cur.execute( \"\"\" SELECT parameters FROM experiments WHERE uuid = ? 
\"\"\",", "a pandas.DataFrame Examples -------- >>> from sklearn_evaluation import SQLiteTracker >>> tracker = SQLiteTracker(':memory:')", "SQLiteTracker: \"\"\"A simple experiment tracker using SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to see the", "uuid \"\"\" self._can_update(uuid) cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET parameters =", "__getitem__(self, uuid): \"\"\"Get experiment with a given uuid \"\"\" # TODO: make it", "comment) return df def query(self, code): \"\"\"Query the database, returns a pandas.DataFrame Examples", "its uuid \"\"\" # TODO: add overwrite (false by default) and append options", "class SQLiteTracker: \"\"\"A simple experiment tracker using SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to see", "self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid, parameters) VALUES(?, ?) \"\"\", [uuid, json.dumps(parameters)])", "fmt == 'html' else '\\n') + '(Most recent experiments)') else: footer = ''", "(('<br>' if fmt == 'html' else '\\n') + '(Most recent experiments)') else: footer", "in df: df = df.set_index('uuid') return df def new(self): \"\"\"Create a new experiment,", "uuid]) cur.close() self.conn.commit() def insert(self, uuid, parameters): \"\"\"Insert a new experiment \"\"\" cur", "options cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET comment = ? WHERE", "FROM experiments ORDER BY created DESC LIMIT ? 
\"\"\" df = pd.read_sql(query, self.conn,", "_recent(self, n=5, fmt='html'): if fmt not in {'html', 'plain'}: raise ValueError('fmt must be", "def update(self, uuid, parameters): \"\"\"Update the parameters of an empty experiment given its", "None if exists: empty = row[0] is None if not empty: raise ValueError('Cannot", "as a pandas.DataFrame \"\"\" query = \"\"\" SELECT uuid, created, parameters, comment FROM", "= pd.read_sql(query, self.conn, params=[n], index_col='uuid') if normalize: # parse and normalize json parameters", "recent experiments)') else: footer = '' return (title + (table.to_html() if fmt ==", "if fmt == 'html' else '\\n') + '(Most recent experiments)') else: footer =", "cur.fetchall() table = Table(res, header=['uuid', 'created', 'parameters', 'comment']) title_template = '<h4> {} </h4>'", "raise ValueError('Cannot update experiment with ' 'uuid \"{}\" because it does ' 'not", "params=[uuid], index_col='uuid') def recent(self, n=5, normalize=False): \"\"\"Get most recent experiments as a pandas.DataFrame", "experiments ( uuid TEXT NOT NULL UNIQUE, created TIMESTAMP default current_timestamp, parameters TEXT,", "uuid): \"\"\"Check if an experiment with a given uuid can be updated \"\"\"", "= ? \"\"\", [uuid]) row = cur.fetchone() exists = row is not None", "must be one \"html\" or \"plain\"') cur = self.conn.cursor() cur.execute( \"\"\" SELECT uuid,", "row = cur.fetchone() exists = row is not None if exists: empty =", "with ' 'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot update experiment with ' 'uuid \"{}\"", "experiment given its uuid \"\"\" self._can_update(uuid) cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments", "= self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid, parameters) VALUES(?, ?) 
\"\"\", [uuid,", "= title_template.format(type(self).__name__) if not len(table): title += '(No experiments saved yet)' if fmt", "fmt == 'html' else str(table)) + footer) def _can_update(self, uuid): \"\"\"Check if an", "pandas.DataFrame Examples -------- >>> from sklearn_evaluation import SQLiteTracker >>> tracker = SQLiteTracker(':memory:') #", "parameters = ? WHERE uuid = ? \"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit() def", "'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot update experiment with ' 'uuid \"{}\" because it", "uuid, json_extract(parameters, '$.a') FROM experiments\") \"\"\" df = pd.read_sql(code, self.conn) if 'uuid' in", "return uuid def update(self, uuid, parameters): \"\"\"Update the parameters of an empty experiment", "import SQLiteTracker >>> tracker = SQLiteTracker(':memory:') # example in-memory db >>> tracker.insert('my_uuid', {'a':", "code): \"\"\"Query the database, returns a pandas.DataFrame Examples -------- >>> from sklearn_evaluation import", "'' return (title + (table.to_html() if fmt == 'html' else str(table)) + footer)", "DESC LIMIT ? \"\"\", [n]) res = cur.fetchall() table = Table(res, header=['uuid', 'created',", "self.conn.cursor() cur.execute( \"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER BY created", "cur.close() def __getitem__(self, uuid): \"\"\"Get experiment with a given uuid \"\"\" # TODO:", "to see the user guide. 
Parameters ---------- path Database location \"\"\" def __init__(self,", "fmt == 'html' else '{}\\n' title = title_template.format(type(self).__name__) if not len(table): title +=", "'html' else '\\n') + '(Most recent experiments)') else: footer = '' return (title", "comment(self, uuid, comment): \"\"\"Add a comment to an experiment given its uuid \"\"\"", "normalize: # parse and normalize json parameters = pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index)", "<../user_guide/SQLiteTracker>` to see the user guide. Parameters ---------- path Database location \"\"\" def", "VALUES(?) \"\"\", [uuid]) cur.close() self.conn.commit() return uuid def update(self, uuid, parameters): \"\"\"Update the", "created TIMESTAMP default current_timestamp, parameters TEXT, comment TEXT ) \"\"\") cur.close() def __getitem__(self,", "else: footer = '' return (title + (table.to_html() if fmt == 'html' else", "given uuid can be updated \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" SELECT parameters", "sklearn_evaluation.table import Table class SQLiteTracker: \"\"\"A simple experiment tracker using SQLite :doc:`Click here", "experiments SET comment = ? WHERE uuid = ? \"\"\", [comment, uuid]) cur.close()", "cur = self.conn.cursor() cur.execute( \"\"\" SELECT parameters FROM experiments WHERE uuid = ?", "from sklearn_evaluation.table import Table class SQLiteTracker: \"\"\"A simple experiment tracker using SQLite :doc:`Click", "for a list of uuids return pd.read_sql('SELECT * FROM experiments WHERE uuid =", "with ' 'uuid \"{}\" because it does ' 'not exist'.format(uuid)) def __repr__(self): return", "'comment']) title_template = '<h4> {} </h4>' if fmt == 'html' else '{}\\n' title", "normalize=False): \"\"\"Get most recent experiments as a pandas.DataFrame \"\"\" query = \"\"\" SELECT", "(uuid) VALUES(?) 
\"\"\", [uuid]) cur.close() self.conn.commit() return uuid def update(self, uuid, parameters): \"\"\"Update", "it does ' 'not exist'.format(uuid)) def __repr__(self): return self._recent(fmt='plain') def _repr_html_(self): return self._recent(fmt='html')", "else '{}\\n' title = title_template.format(type(self).__name__) if not len(table): title += '(No experiments saved", "if 'uuid' in df: df = df.set_index('uuid') return df def new(self): \"\"\"Create a", "? WHERE uuid = ? \"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit() def insert(self, uuid,", "df = tracker.query( ... \"SELECT uuid, json_extract(parameters, '$.a') FROM experiments\") \"\"\" df =", "is not None if exists: empty = row[0] is None if not empty:", "+ footer) def _can_update(self, uuid): \"\"\"Check if an experiment with a given uuid", "tracker.query( ... \"SELECT uuid, json_extract(parameters, '$.a') FROM experiments\") \"\"\" df = pd.read_sql(code, self.conn)", "be updated \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" SELECT parameters FROM experiments WHERE", "footer) def _can_update(self, uuid): \"\"\"Check if an experiment with a given uuid can", "experiment \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid, parameters) VALUES(?,", "= self.conn.cursor() cur.execute(\"\"\" CREATE TABLE IF NOT EXISTS experiments ( uuid TEXT NOT", "pd.read_sql('SELECT * FROM experiments WHERE uuid = ?', self.conn, params=[uuid], index_col='uuid') def recent(self,", "# TODO: add overwrite (false by default) and append options cur = self.conn.cursor()", ">>> tracker = SQLiteTracker(':memory:') # example in-memory db >>> tracker.insert('my_uuid', {'a': 1}) >>>", "in-memory db >>> tracker.insert('my_uuid', {'a': 1}) >>> df = tracker.query( ... \"SELECT uuid,", "new(self): \"\"\"Create a new experiment, returns a uuid \"\"\" uuid = uuid4().hex cur", "FROM experiments WHERE uuid = ? 
\"\"\", [uuid]) row = cur.fetchone() exists =", "\"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER BY created DESC LIMIT", "{'html', 'plain'}: raise ValueError('fmt must be one \"html\" or \"plain\"') cur = self.conn.cursor()", "row is not None if exists: empty = row[0] is None if not", "experiments)') else: footer = '' return (title + (table.to_html() if fmt == 'html'", "df def query(self, code): \"\"\"Query the database, returns a pandas.DataFrame Examples -------- >>>", ") \"\"\") cur.close() def __getitem__(self, uuid): \"\"\"Get experiment with a given uuid \"\"\"", "self.conn.cursor() cur.execute( \"\"\" SELECT parameters FROM experiments WHERE uuid = ? \"\"\", [uuid])", "= uuid4().hex cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid) VALUES(?) \"\"\",", "its uuid \"\"\" self._can_update(uuid) cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET parameters", "insert(self, uuid, parameters): \"\"\"Insert a new experiment \"\"\" cur = self.conn.cursor() cur.execute( \"\"\"", "not empty: raise ValueError('Cannot update non-empty experiment with ' 'uuid \"{}\"'.format(uuid)) else: raise", "df.set_index('uuid') return df def new(self): \"\"\"Create a new experiment, returns a uuid \"\"\"", "= df.join(parameters) # re order columns to show \"comment\" at the end comment", "given uuid \"\"\" # TODO: make it work for a list of uuids", "parameters): \"\"\"Update the parameters of an empty experiment given its uuid \"\"\" self._can_update(uuid)", "' 'uuid \"{}\" because it does ' 'not exist'.format(uuid)) def __repr__(self): return self._recent(fmt='plain')", "\"html\" or \"plain\"') cur = self.conn.cursor() cur.execute( \"\"\" SELECT uuid, created, parameters, comment", "import pandas as pd from sklearn_evaluation.table import Table class SQLiteTracker: \"\"\"A simple experiment", "(title + (table.to_html() if fmt == 'html' else str(table)) + footer) def _can_update(self,", "self.conn.cursor() 
cur.execute(\"\"\" CREATE TABLE IF NOT EXISTS experiments ( uuid TEXT NOT NULL", "example in-memory db >>> tracker.insert('my_uuid', {'a': 1}) >>> df = tracker.query( ... \"SELECT", "df = df.set_index('uuid') return df def new(self): \"\"\"Create a new experiment, returns a", "def new(self): \"\"\"Create a new experiment, returns a uuid \"\"\" uuid = uuid4().hex", "_can_update(self, uuid): \"\"\"Check if an experiment with a given uuid can be updated", "[comment, uuid]) cur.close() self.conn.commit() def _recent(self, n=5, fmt='html'): if fmt not in {'html',", "params=[n], index_col='uuid') if normalize: # parse and normalize json parameters = pd.json_normalize( df.pop('parameters').apply(lambda", "__init__(self, path: str): self.conn = sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\" CREATE TABLE IF", "pd from sklearn_evaluation.table import Table class SQLiteTracker: \"\"\"A simple experiment tracker using SQLite", "<gh_stars>100-1000 from uuid import uuid4 import sqlite3 import json import pandas as pd", "= cur.fetchone() exists = row is not None if exists: empty = row[0]", "uuid \"\"\" uuid = uuid4().hex cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments", "a list of uuids return pd.read_sql('SELECT * FROM experiments WHERE uuid = ?',", "LIMIT ? \"\"\" df = pd.read_sql(query, self.conn, params=[n], index_col='uuid') if normalize: # parse", "\"\"\" # TODO: add overwrite (false by default) and append options cur =", "if fmt == 'html' else '{}\\n' title = title_template.format(type(self).__name__) if not len(table): title", "header=['uuid', 'created', 'parameters', 'comment']) title_template = '<h4> {} </h4>' if fmt == 'html'", "not len(table): title += '(No experiments saved yet)' if fmt == 'plain': title", "WHERE uuid = ? \"\"\", [comment, uuid]) cur.close() self.conn.commit() def _recent(self, n=5, fmt='html'):", "created DESC LIMIT ? 
\"\"\" df = pd.read_sql(query, self.conn, params=[n], index_col='uuid') if normalize:", "parameters FROM experiments WHERE uuid = ? \"\"\", [uuid]) row = cur.fetchone() exists", "ORDER BY created DESC LIMIT ? \"\"\" df = pd.read_sql(query, self.conn, params=[n], index_col='uuid')", "df = df.join(parameters) # re order columns to show \"comment\" at the end", "df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df = df.join(parameters) # re order columns to show", "not None if exists: empty = row[0] is None if not empty: raise", "df.join(parameters) # re order columns to show \"comment\" at the end comment =", "\"comment\" at the end comment = df.pop('comment') df.insert(len(df.columns), 'comment', comment) return df def", "cur.execute( \"\"\" INSERT INTO experiments (uuid, parameters) VALUES(?, ?) \"\"\", [uuid, json.dumps(parameters)]) cur.close()", "'uuid' in df: df = df.set_index('uuid') return df def new(self): \"\"\"Create a new", "experiment given its uuid \"\"\" # TODO: add overwrite (false by default) and", "'created', 'parameters', 'comment']) title_template = '<h4> {} </h4>' if fmt == 'html' else", "re order columns to show \"comment\" at the end comment = df.pop('comment') df.insert(len(df.columns),", "most recent experiments as a pandas.DataFrame \"\"\" query = \"\"\" SELECT uuid, created,", "yet)' if fmt == 'plain': title += '\\n' if len(table): footer = (('<br>'", "parameters = pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df = df.join(parameters) # re order", "[n]) res = cur.fetchall() table = Table(res, header=['uuid', 'created', 'parameters', 'comment']) title_template =", "uuid4 import sqlite3 import json import pandas as pd from sklearn_evaluation.table import Table", "uuid TEXT NOT NULL UNIQUE, created TIMESTAMP default current_timestamp, parameters TEXT, comment TEXT", "import sqlite3 import json import pandas as pd from sklearn_evaluation.table import Table class", "of uuids 
return pd.read_sql('SELECT * FROM experiments WHERE uuid = ?', self.conn, params=[uuid],", "cur.close() self.conn.commit() return uuid def update(self, uuid, parameters): \"\"\"Update the parameters of an", "def __init__(self, path: str): self.conn = sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\" CREATE TABLE", "uuid \"\"\" # TODO: make it work for a list of uuids return", "(false by default) and append options cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments", "saved yet)' if fmt == 'plain': title += '\\n' if len(table): footer =", "ORDER BY created DESC LIMIT ? \"\"\", [n]) res = cur.fetchall() table =", "at the end comment = df.pop('comment') df.insert(len(df.columns), 'comment', comment) return df def query(self,", "= SQLiteTracker(':memory:') # example in-memory db >>> tracker.insert('my_uuid', {'a': 1}) >>> df =", "[uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self, uuid, comment): \"\"\"Add a comment to an", "empty: raise ValueError('Cannot update non-empty experiment with ' 'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot", "a comment to an experiment given its uuid \"\"\" # TODO: add overwrite", "sqlite3 import json import pandas as pd from sklearn_evaluation.table import Table class SQLiteTracker:", "n=5, fmt='html'): if fmt not in {'html', 'plain'}: raise ValueError('fmt must be one", "* FROM experiments WHERE uuid = ?', self.conn, params=[uuid], index_col='uuid') def recent(self, n=5,", "= sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\" CREATE TABLE IF NOT EXISTS experiments (", "comment): \"\"\"Add a comment to an experiment given its uuid \"\"\" # TODO:", "res = cur.fetchall() table = Table(res, header=['uuid', 'created', 'parameters', 'comment']) title_template = '<h4>", "\"SELECT uuid, json_extract(parameters, '$.a') FROM experiments\") \"\"\" df = pd.read_sql(code, self.conn) if 'uuid'", "raise ValueError('fmt must be one \"html\" or \"plain\"') cur = 
self.conn.cursor() cur.execute( \"\"\"", "= ? WHERE uuid = ? \"\"\", [comment, uuid]) cur.close() self.conn.commit() def _recent(self,", "df.pop('comment') df.insert(len(df.columns), 'comment', comment) return df def query(self, code): \"\"\"Query the database, returns", "title = title_template.format(type(self).__name__) if not len(table): title += '(No experiments saved yet)' if", "default) and append options cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET comment", "\"\"\" cur = self.conn.cursor() cur.execute( \"\"\" SELECT parameters FROM experiments WHERE uuid =", "+ (table.to_html() if fmt == 'html' else str(table)) + footer) def _can_update(self, uuid):", "cur.execute( \"\"\" UPDATE experiments SET comment = ? WHERE uuid = ? \"\"\",", "recent(self, n=5, normalize=False): \"\"\"Get most recent experiments as a pandas.DataFrame \"\"\" query =", "self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid) VALUES(?) \"\"\", [uuid]) cur.close() self.conn.commit() return", "fmt not in {'html', 'plain'}: raise ValueError('fmt must be one \"html\" or \"plain\"')", "experiments SET parameters = ? WHERE uuid = ? \"\"\", [json.dumps(parameters), uuid]) cur.close()", "len(table): title += '(No experiments saved yet)' if fmt == 'plain': title +=", "append options cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET comment = ?", "?', self.conn, params=[uuid], index_col='uuid') def recent(self, n=5, normalize=False): \"\"\"Get most recent experiments as", "'(No experiments saved yet)' if fmt == 'plain': title += '\\n' if len(table):", "comment FROM experiments ORDER BY created DESC LIMIT ? \"\"\", [n]) res =", "a given uuid \"\"\" # TODO: make it work for a list of", "= ? 
\"\"\", [comment, uuid]) cur.close() self.conn.commit() def _recent(self, n=5, fmt='html'): if fmt", "row[0] is None if not empty: raise ValueError('Cannot update non-empty experiment with '", "empty = row[0] is None if not empty: raise ValueError('Cannot update non-empty experiment", "TIMESTAMP default current_timestamp, parameters TEXT, comment TEXT ) \"\"\") cur.close() def __getitem__(self, uuid):", "user guide. Parameters ---------- path Database location \"\"\" def __init__(self, path: str): self.conn", "simple experiment tracker using SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to see the user guide.", "NULL UNIQUE, created TIMESTAMP default current_timestamp, parameters TEXT, comment TEXT ) \"\"\") cur.close()", "experiments\") \"\"\" df = pd.read_sql(code, self.conn) if 'uuid' in df: df = df.set_index('uuid')", "exists: empty = row[0] is None if not empty: raise ValueError('Cannot update non-empty", "self.conn, params=[uuid], index_col='uuid') def recent(self, n=5, normalize=False): \"\"\"Get most recent experiments as a", "tracker.insert('my_uuid', {'a': 1}) >>> df = tracker.query( ... 
\"SELECT uuid, json_extract(parameters, '$.a') FROM", "# parse and normalize json parameters = pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df", "table = Table(res, header=['uuid', 'created', 'parameters', 'comment']) title_template = '<h4> {} </h4>' if", "title += '\\n' if len(table): footer = (('<br>' if fmt == 'html' else", "in {'html', 'plain'}: raise ValueError('fmt must be one \"html\" or \"plain\"') cur =", "== 'html' else str(table)) + footer) def _can_update(self, uuid): \"\"\"Check if an experiment", "df.insert(len(df.columns), 'comment', comment) return df def query(self, code): \"\"\"Query the database, returns a", "a new experiment \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid,", "Database location \"\"\" def __init__(self, path: str): self.conn = sqlite3.connect(path) cur = self.conn.cursor()", "a uuid \"\"\" uuid = uuid4().hex cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO", "the user guide. Parameters ---------- path Database location \"\"\" def __init__(self, path: str):", "' 'not exist'.format(uuid)) def __repr__(self): return self._recent(fmt='plain') def _repr_html_(self): return self._recent(fmt='html') def __del__(self):", "end comment = df.pop('comment') df.insert(len(df.columns), 'comment', comment) return df def query(self, code): \"\"\"Query", "json_extract(parameters, '$.a') FROM experiments\") \"\"\" df = pd.read_sql(code, self.conn) if 'uuid' in df:", "cur.fetchone() exists = row is not None if exists: empty = row[0] is", "= Table(res, header=['uuid', 'created', 'parameters', 'comment']) title_template = '<h4> {} </h4>' if fmt", "experiment with ' 'uuid \"{}\" because it does ' 'not exist'.format(uuid)) def __repr__(self):", "= ? \"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit() def insert(self, uuid, parameters): \"\"\"Insert a", "uuid = ? 
\"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit() def insert(self, uuid, parameters): \"\"\"Insert", "return df def query(self, code): \"\"\"Query the database, returns a pandas.DataFrame Examples --------", "= pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df = df.join(parameters) # re order columns", "of an empty experiment given its uuid \"\"\" self._can_update(uuid) cur = self.conn.cursor() cur.execute(", "import Table class SQLiteTracker: \"\"\"A simple experiment tracker using SQLite :doc:`Click here <../user_guide/SQLiteTracker>`", "\"\"\", [comment, uuid]) cur.close() self.conn.commit() def _recent(self, n=5, fmt='html'): if fmt not in", "uuid4().hex cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid) VALUES(?) \"\"\", [uuid])", "def comment(self, uuid, comment): \"\"\"Add a comment to an experiment given its uuid", "current_timestamp, parameters TEXT, comment TEXT ) \"\"\") cur.close() def __getitem__(self, uuid): \"\"\"Get experiment", "with a given uuid \"\"\" # TODO: make it work for a list", "return pd.read_sql('SELECT * FROM experiments WHERE uuid = ?', self.conn, params=[uuid], index_col='uuid') def", "experiment with ' 'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot update experiment with ' 'uuid", "uuid, created, parameters, comment FROM experiments ORDER BY created DESC LIMIT ? \"\"\",", "= '<h4> {} </h4>' if fmt == 'html' else '{}\\n' title = title_template.format(type(self).__name__)", "parameters, comment FROM experiments ORDER BY created DESC LIMIT ? \"\"\" df =", ":doc:`Click here <../user_guide/SQLiteTracker>` to see the user guide. Parameters ---------- path Database location", "= row[0] is None if not empty: raise ValueError('Cannot update non-empty experiment with", "INTO experiments (uuid) VALUES(?) \"\"\", [uuid]) cur.close() self.conn.commit() return uuid def update(self, uuid,", "INTO experiments (uuid, parameters) VALUES(?, ?) 
\"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self,", "uuid]) cur.close() self.conn.commit() def _recent(self, n=5, fmt='html'): if fmt not in {'html', 'plain'}:", "uuid, parameters): \"\"\"Update the parameters of an empty experiment given its uuid \"\"\"", "uuid, created, parameters, comment FROM experiments ORDER BY created DESC LIMIT ? \"\"\"", "Examples -------- >>> from sklearn_evaluation import SQLiteTracker >>> tracker = SQLiteTracker(':memory:') # example", "TODO: make it work for a list of uuids return pd.read_sql('SELECT * FROM", "cur.execute( \"\"\" UPDATE experiments SET parameters = ? WHERE uuid = ? \"\"\",", "fmt == 'plain': title += '\\n' if len(table): footer = (('<br>' if fmt", "cur = self.conn.cursor() cur.execute( \"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER", "a pandas.DataFrame \"\"\" query = \"\"\" SELECT uuid, created, parameters, comment FROM experiments", "the database, returns a pandas.DataFrame Examples -------- >>> from sklearn_evaluation import SQLiteTracker >>>", "SELECT uuid, created, parameters, comment FROM experiments ORDER BY created DESC LIMIT ?", "title_template = '<h4> {} </h4>' if fmt == 'html' else '{}\\n' title =", "\"\"\"Create a new experiment, returns a uuid \"\"\" uuid = uuid4().hex cur =", "cur.close() self.conn.commit() def _recent(self, n=5, fmt='html'): if fmt not in {'html', 'plain'}: raise", "+= '\\n' if len(table): footer = (('<br>' if fmt == 'html' else '\\n')", "show \"comment\" at the end comment = df.pop('comment') df.insert(len(df.columns), 'comment', comment) return df", "self._can_update(uuid) cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET parameters = ? 
WHERE", "None if not empty: raise ValueError('Cannot update non-empty experiment with ' 'uuid \"{}\"'.format(uuid))", "does ' 'not exist'.format(uuid)) def __repr__(self): return self._recent(fmt='plain') def _repr_html_(self): return self._recent(fmt='html') def", "see the user guide. Parameters ---------- path Database location \"\"\" def __init__(self, path:", "\"\"\"Update the parameters of an empty experiment given its uuid \"\"\" self._can_update(uuid) cur", "? \"\"\", [comment, uuid]) cur.close() self.conn.commit() def _recent(self, n=5, fmt='html'): if fmt not", "def insert(self, uuid, parameters): \"\"\"Insert a new experiment \"\"\" cur = self.conn.cursor() cur.execute(", "\"\"\" # TODO: make it work for a list of uuids return pd.read_sql('SELECT", "= self.conn.cursor() cur.execute( \"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER BY", "-------- >>> from sklearn_evaluation import SQLiteTracker >>> tracker = SQLiteTracker(':memory:') # example in-memory", "{'a': 1}) >>> df = tracker.query( ... \"SELECT uuid, json_extract(parameters, '$.a') FROM experiments\")", "\"\"\" query = \"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER BY", "self.conn.commit() def insert(self, uuid, parameters): \"\"\"Insert a new experiment \"\"\" cur = self.conn.cursor()", "def _recent(self, n=5, fmt='html'): if fmt not in {'html', 'plain'}: raise ValueError('fmt must", "TEXT ) \"\"\") cur.close() def __getitem__(self, uuid): \"\"\"Get experiment with a given uuid", "self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET comment = ? 
WHERE uuid = ?", "'comment', comment) return df def query(self, code): \"\"\"Query the database, returns a pandas.DataFrame", "is None if not empty: raise ValueError('Cannot update non-empty experiment with ' 'uuid", "cur.execute(\"\"\" CREATE TABLE IF NOT EXISTS experiments ( uuid TEXT NOT NULL UNIQUE,", "tracker = SQLiteTracker(':memory:') # example in-memory db >>> tracker.insert('my_uuid', {'a': 1}) >>> df", "\"\"\" cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid, parameters) VALUES(?, ?)", "FROM experiments WHERE uuid = ?', self.conn, params=[uuid], index_col='uuid') def recent(self, n=5, normalize=False):", "\"\"\"Query the database, returns a pandas.DataFrame Examples -------- >>> from sklearn_evaluation import SQLiteTracker", "1}) >>> df = tracker.query( ... \"SELECT uuid, json_extract(parameters, '$.a') FROM experiments\") \"\"\"", "self.conn) if 'uuid' in df: df = df.set_index('uuid') return df def new(self): \"\"\"Create", "given its uuid \"\"\" self._can_update(uuid) cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET", "experiments WHERE uuid = ? 
\"\"\", [uuid]) row = cur.fetchone() exists = row", "path: str): self.conn = sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\" CREATE TABLE IF NOT", "query(self, code): \"\"\"Query the database, returns a pandas.DataFrame Examples -------- >>> from sklearn_evaluation", "empty experiment given its uuid \"\"\" self._can_update(uuid) cur = self.conn.cursor() cur.execute( \"\"\" UPDATE", "\"\"\") cur.close() def __getitem__(self, uuid): \"\"\"Get experiment with a given uuid \"\"\" #", "'plain'}: raise ValueError('fmt must be one \"html\" or \"plain\"') cur = self.conn.cursor() cur.execute(", "title += '(No experiments saved yet)' if fmt == 'plain': title += '\\n'", "by default) and append options cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET", "non-empty experiment with ' 'uuid \"{}\"'.format(uuid)) else: raise ValueError('Cannot update experiment with '", "cur.close() self.conn.commit() def insert(self, uuid, parameters): \"\"\"Insert a new experiment \"\"\" cur =", "make it work for a list of uuids return pd.read_sql('SELECT * FROM experiments", "with a given uuid can be updated \"\"\" cur = self.conn.cursor() cur.execute( \"\"\"", "EXISTS experiments ( uuid TEXT NOT NULL UNIQUE, created TIMESTAMP default current_timestamp, parameters", "experiments saved yet)' if fmt == 'plain': title += '\\n' if len(table): footer", "if len(table): footer = (('<br>' if fmt == 'html' else '\\n') + '(Most", "parse and normalize json parameters = pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df =", "\"plain\"') cur = self.conn.cursor() cur.execute( \"\"\" SELECT uuid, created, parameters, comment FROM experiments", "[json.dumps(parameters), uuid]) cur.close() self.conn.commit() def insert(self, uuid, parameters): \"\"\"Insert a new experiment \"\"\"", "comment FROM experiments ORDER BY created DESC LIMIT ? 
\"\"\" df = pd.read_sql(query,", "str): self.conn = sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\" CREATE TABLE IF NOT EXISTS", "path Database location \"\"\" def __init__(self, path: str): self.conn = sqlite3.connect(path) cur =", "\"\"\"Check if an experiment with a given uuid can be updated \"\"\" cur", "df.index) df = df.join(parameters) # re order columns to show \"comment\" at the", "if fmt == 'plain': title += '\\n' if len(table): footer = (('<br>' if", "def _can_update(self, uuid): \"\"\"Check if an experiment with a given uuid can be", "</h4>' if fmt == 'html' else '{}\\n' title = title_template.format(type(self).__name__) if not len(table):", "UPDATE experiments SET comment = ? WHERE uuid = ? \"\"\", [comment, uuid])", "self.conn.commit() def _recent(self, n=5, fmt='html'): if fmt not in {'html', 'plain'}: raise ValueError('fmt", "be one \"html\" or \"plain\"') cur = self.conn.cursor() cur.execute( \"\"\" SELECT uuid, created,", "\"\"\"A simple experiment tracker using SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to see the user", "and append options cur = self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET comment =", ">>> from sklearn_evaluation import SQLiteTracker >>> tracker = SQLiteTracker(':memory:') # example in-memory db", "= (('<br>' if fmt == 'html' else '\\n') + '(Most recent experiments)') else:", "uuid import uuid4 import sqlite3 import json import pandas as pd from sklearn_evaluation.table", "location \"\"\" def __init__(self, path: str): self.conn = sqlite3.connect(path) cur = self.conn.cursor() cur.execute(\"\"\"", "return df def new(self): \"\"\"Create a new experiment, returns a uuid \"\"\" uuid", "\"\"\" INSERT INTO experiments (uuid) VALUES(?) \"\"\", [uuid]) cur.close() self.conn.commit() return uuid def", "query = \"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER BY created", "INSERT INTO experiments (uuid) VALUES(?) 
\"\"\", [uuid]) cur.close() self.conn.commit() return uuid def update(self,", "experiment with a given uuid \"\"\" # TODO: make it work for a", "cur = self.conn.cursor() cur.execute( \"\"\" INSERT INTO experiments (uuid, parameters) VALUES(?, ?) \"\"\",", "an experiment given its uuid \"\"\" # TODO: add overwrite (false by default)", "order columns to show \"comment\" at the end comment = df.pop('comment') df.insert(len(df.columns), 'comment',", "uuid \"\"\" # TODO: add overwrite (false by default) and append options cur", "experiments ORDER BY created DESC LIMIT ? \"\"\" df = pd.read_sql(query, self.conn, params=[n],", "'plain': title += '\\n' if len(table): footer = (('<br>' if fmt == 'html'", "= cur.fetchall() table = Table(res, header=['uuid', 'created', 'parameters', 'comment']) title_template = '<h4> {}", "# TODO: make it work for a list of uuids return pd.read_sql('SELECT *", "= df.set_index('uuid') return df def new(self): \"\"\"Create a new experiment, returns a uuid", "? \"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit() def insert(self, uuid, parameters): \"\"\"Insert a new", "\"\"\" df = pd.read_sql(query, self.conn, params=[n], index_col='uuid') if normalize: # parse and normalize", "?) \"\"\", [uuid, json.dumps(parameters)]) cur.close() self.conn.commit() def comment(self, uuid, comment): \"\"\"Add a comment", "SET parameters = ? WHERE uuid = ? \"\"\", [json.dumps(parameters), uuid]) cur.close() self.conn.commit()", "SQLite :doc:`Click here <../user_guide/SQLiteTracker>` to see the user guide. Parameters ---------- path Database", "'\\n') + '(Most recent experiments)') else: footer = '' return (title + (table.to_html()", "def __getitem__(self, uuid): \"\"\"Get experiment with a given uuid \"\"\" # TODO: make", "= self.conn.cursor() cur.execute( \"\"\" UPDATE experiments SET parameters = ? WHERE uuid =", "DESC LIMIT ? 
\"\"\" df = pd.read_sql(query, self.conn, params=[n], index_col='uuid') if normalize: #", "to show \"comment\" at the end comment = df.pop('comment') df.insert(len(df.columns), 'comment', comment) return", "uuid can be updated \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" SELECT parameters FROM", "updated \"\"\" cur = self.conn.cursor() cur.execute( \"\"\" SELECT parameters FROM experiments WHERE uuid", "footer = (('<br>' if fmt == 'html' else '\\n') + '(Most recent experiments)')", "experiment with a given uuid can be updated \"\"\" cur = self.conn.cursor() cur.execute(", "fmt='html'): if fmt not in {'html', 'plain'}: raise ValueError('fmt must be one \"html\"", "pd.json_normalize( df.pop('parameters').apply(lambda s: json.loads(s))).set_index( df.index) df = df.join(parameters) # re order columns to", "update(self, uuid, parameters): \"\"\"Update the parameters of an empty experiment given its uuid", "the parameters of an empty experiment given its uuid \"\"\" self._can_update(uuid) cur =", "work for a list of uuids return pd.read_sql('SELECT * FROM experiments WHERE uuid", "index_col='uuid') def recent(self, n=5, normalize=False): \"\"\"Get most recent experiments as a pandas.DataFrame \"\"\"", "def query(self, code): \"\"\"Query the database, returns a pandas.DataFrame Examples -------- >>> from", "cur.close() self.conn.commit() def comment(self, uuid, comment): \"\"\"Add a comment to an experiment given", "guide. Parameters ---------- path Database location \"\"\" def __init__(self, path: str): self.conn =", "comment to an experiment given its uuid \"\"\" # TODO: add overwrite (false", "pandas.DataFrame \"\"\" query = \"\"\" SELECT uuid, created, parameters, comment FROM experiments ORDER", "created, parameters, comment FROM experiments ORDER BY created DESC LIMIT ? 
\"\"\" df", "parameters TEXT, comment TEXT ) \"\"\") cur.close() def __getitem__(self, uuid): \"\"\"Get experiment with", "footer = '' return (title + (table.to_html() if fmt == 'html' else str(table))", "database, returns a pandas.DataFrame Examples -------- >>> from sklearn_evaluation import SQLiteTracker >>> tracker", "def recent(self, n=5, normalize=False): \"\"\"Get most recent experiments as a pandas.DataFrame \"\"\" query", "ValueError('Cannot update experiment with ' 'uuid \"{}\" because it does ' 'not exist'.format(uuid))", "len(table): footer = (('<br>' if fmt == 'html' else '\\n') + '(Most recent", "? \"\"\", [n]) res = cur.fetchall() table = Table(res, header=['uuid', 'created', 'parameters', 'comment'])" ]
[]
[]
[ "in the top level # directory for more details. import sys from testrunner", "# # This file is subject to the terms and conditions of the", "This file is subject to the terms and conditions of the GNU Lesser", "file is subject to the terms and conditions of the GNU Lesser #", "to the terms and conditions of the GNU Lesser # General Public License", "file LICENSE in the top level # directory for more details. import sys", "terms and conditions of the GNU Lesser # General Public License v2.1. See", "# Copyright (C) 2019 <NAME> <<EMAIL>> # # This file is subject to", "#!/usr/bin/env python3 # Copyright (C) 2019 <NAME> <<EMAIL>> # # This file is", "python3 # Copyright (C) 2019 <NAME> <<EMAIL>> # # This file is subject", "GNU Lesser # General Public License v2.1. See the file LICENSE in the", "import run def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\") child.expect_exact(\"0x02\") child.expect_exact(\"0x03\") child.expect_exact(\"0xFF\")", "def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\") child.expect_exact(\"0x02\") child.expect_exact(\"0x03\") child.expect_exact(\"0xFF\") if __name__", "testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\") child.expect_exact(\"0x02\") child.expect_exact(\"0x03\") child.expect_exact(\"0xFF\") if __name__ ==", "(C) 2019 <NAME> <<EMAIL>> # # This file is subject to the terms", "details. import sys from testrunner import run def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\")", "# General Public License v2.1. See the file LICENSE in the top level", "Public License v2.1. 
See the file LICENSE in the top level # directory", "<NAME> <<EMAIL>> # # This file is subject to the terms and conditions", "the file LICENSE in the top level # directory for more details. import", "v2.1. See the file LICENSE in the top level # directory for more", "License v2.1. See the file LICENSE in the top level # directory for", "General Public License v2.1. See the file LICENSE in the top level #", "conditions of the GNU Lesser # General Public License v2.1. See the file", "and conditions of the GNU Lesser # General Public License v2.1. See the", "# directory for more details. import sys from testrunner import run def testfunc(child):", "more details. import sys from testrunner import run def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello", "of the GNU Lesser # General Public License v2.1. See the file LICENSE", "sys from testrunner import run def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\")", "level # directory for more details. import sys from testrunner import run def", "for more details. import sys from testrunner import run def testfunc(child): child.expect_exact(\"Hello blob!\")", "child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\") child.expect_exact(\"0x02\") child.expect_exact(\"0x03\") child.expect_exact(\"0xFF\") if __name__ == \"__main__\":", "LICENSE in the top level # directory for more details. import sys from", "See the file LICENSE in the top level # directory for more details.", "the GNU Lesser # General Public License v2.1. See the file LICENSE in", "top level # directory for more details. import sys from testrunner import run", "Lesser # General Public License v2.1. 
See the file LICENSE in the top", "subject to the terms and conditions of the GNU Lesser # General Public", "Copyright (C) 2019 <NAME> <<EMAIL>> # # This file is subject to the", "run def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\") child.expect_exact(\"0x02\") child.expect_exact(\"0x03\") child.expect_exact(\"0xFF\") if", "blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\") child.expect_exact(\"0x02\") child.expect_exact(\"0x03\") child.expect_exact(\"0xFF\") if __name__ == \"__main__\": sys.exit(run(testfunc))", "the terms and conditions of the GNU Lesser # General Public License v2.1.", "# This file is subject to the terms and conditions of the GNU", "<reponame>ARte-team/ARte #!/usr/bin/env python3 # Copyright (C) 2019 <NAME> <<EMAIL>> # # This file", "import sys from testrunner import run def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\")", "2019 <NAME> <<EMAIL>> # # This file is subject to the terms and", "is subject to the terms and conditions of the GNU Lesser # General", "<<EMAIL>> # # This file is subject to the terms and conditions of", "directory for more details. import sys from testrunner import run def testfunc(child): child.expect_exact(\"Hello", "from testrunner import run def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\") child.expect_exact(\"0x02\")", "the top level # directory for more details. import sys from testrunner import", "testrunner import run def testfunc(child): child.expect_exact(\"Hello blob!\") child.expect_exact(\"Hello blob_subdir!\") child.expect_exact(\"0x00\") child.expect_exact(\"0x01\") child.expect_exact(\"0x02\") child.expect_exact(\"0x03\")" ]
[]
[ "obj[\"consumers\"] for consumer in consumers: for topic in consumer['topics']: cons = consumer['name'] value", "#!/usr/bin/python2.7 # -*- coding: utf-8 -*- import sys import requests import ConfigParser import", "e: print e sys.exit(1) return obj.json() # get all exist clusters for url", "[] for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url)", "'.' + consumer['type'] + '.' + cons.replace('.','_') + '.' + topic.replace('.','_') message =", "config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url = config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' .", "cons.replace('.','_') + '.' + topic.replace('.','_') message = '%s %s %d' % (value, consumer['lags'][topic],", "e sys.exit(1) return obj.json() # get all exist clusters for url concatenate in", "for url concatenate in future def get_clusters(base_url): obj = get_json_object(base_url + '/api/status/clusters') clusters", "all exist clusters for url concatenate in future def get_clusters(base_url): obj = get_json_object(base_url", "+ '.' + cons.replace('.','_') + '.' + topic.replace('.','_') message = '%s %s %d'", "in future def get_clusters(base_url): obj = get_json_object(base_url + '/api/status/clusters') clusters = [] for", "return obj.json() # get all exist clusters for url concatenate in future def", "def get_clusters(base_url): obj = get_json_object(base_url + '/api/status/clusters') clusters = [] for el in", "consumer['type'] + '.' + cons.replace('.','_') + '.' 
+ topic.replace('.','_') message = '%s %s", "<reponame>aseev-xx/kafka-consumer-lag-metrics<filename>consumer_lag.py #!/usr/bin/python2.7 # -*- coding: utf-8 -*- import sys import requests import ConfigParser", "+ topic.replace('.','_') message = '%s %s %d' % (value, consumer['lags'][topic], timestamp) metrics.append(message) return", "clusters = get_clusters(base_url) metrics = [] timestamp = int(time.time()) for cluster in clusters:", "coding: utf-8 -*- import sys import requests import ConfigParser import time import socket", "# get base json object for next def get_json_object(url): try: obj = requests.get(url)", "timestamp = int(time.time()) for cluster in clusters: url = base_url + '/api/status/' +", "sys import requests import ConfigParser import time import socket import os # get", "return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' % message sock = socket.socket() sock.connect((graphite_host,graphite_port))", "for cluster in clusters: url = base_url + '/api/status/' + cluster + '/consumersSummary'", "obj = get_json_object(base_url + '/api/status/clusters') clusters = [] for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"])", "get_json_object(url) consumers = obj[\"consumers\"] for consumer in consumers: for topic in consumer['topics']: cons", "+ '/consumersSummary' obj = get_json_object(url) consumers = obj[\"consumers\"] for consumer in consumers: for", "sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) +", "sys.exit(1) return obj.json() # get all exist clusters for url concatenate in future", "exist clusters for url concatenate in future def get_clusters(base_url): obj = get_json_object(base_url +", "try: obj = requests.get(url) except requests.exceptions.RequestException as e: print e sys.exit(1) return obj.json()", "get all exist clusters 
for url concatenate in future def get_clusters(base_url): obj =", "clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics = [] timestamp =", "in clusters: url = base_url + '/api/status/' + cluster + '/consumersSummary' obj =", "message = '\\n' . join(bulk) + '\\n' send_graphite_metrics(message,config.get('graphite','host'),int(config.get('graphite','port'))) if __name__ == \"__main__\": main()", "get_clusters(base_url): obj = get_json_object(base_url + '/api/status/clusters') clusters = [] for el in obj[\"clusters\"][\"active\"]:", "= obj[\"consumers\"] for consumer in consumers: for topic in consumer['topics']: cons = consumer['name']", "url = base_url + '/api/status/' + cluster + '/consumersSummary' obj = get_json_object(url) consumers", "'sending message:\\n%s' % message sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main(): config", "concatenate in future def get_clusters(base_url): obj = get_json_object(base_url + '/api/status/clusters') clusters = []", "int(time.time()) for cluster in clusters: url = base_url + '/api/status/' + cluster +", "prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' . 
join(bulk) + '\\n' send_graphite_metrics(message,config.get('graphite','host'),int(config.get('graphite','port'))) if __name__ == \"__main__\":", "requests.get(url) except requests.exceptions.RequestException as e: print e sys.exit(1) return obj.json() # get all", "= [] timestamp = int(time.time()) for cluster in clusters: url = base_url +", "= ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url = config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message =", "consumer['lags'][topic], timestamp) metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' % message sock", "def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics = [] timestamp = int(time.time()) for cluster", "def main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url = config.get('api','url') bulk =", "requests import ConfigParser import time import socket import os # get base json", "import os # get base json object for next def get_json_object(url): try: obj", "'/api/status/' + cluster + '/consumersSummary' obj = get_json_object(url) consumers = obj[\"consumers\"] for consumer", "get_json_object(url): try: obj = requests.get(url) except requests.exceptions.RequestException as e: print e sys.exit(1) return", "utf-8 -*- import sys import requests import ConfigParser import time import socket import", "cons = consumer['name'] value = graphite_prefix + cluster.replace('.','_') + '.' 
+ consumer['type'] +", "% (value, consumer['lags'][topic], timestamp) metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' %", "'/api/status/clusters') clusters = [] for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix):", "def get_json_object(url): try: obj = requests.get(url) except requests.exceptions.RequestException as e: print e sys.exit(1)", "-*- import sys import requests import ConfigParser import time import socket import os", "config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url = config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message", "ConfigParser import time import socket import os # get base json object for", "for topic in consumer['topics']: cons = consumer['name'] value = graphite_prefix + cluster.replace('.','_') +", "requests.exceptions.RequestException as e: print e sys.exit(1) return obj.json() # get all exist clusters", "%d' % (value, consumer['lags'][topic], timestamp) metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s'", "message:\\n%s' % message sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main(): config =", "consumer in consumers: for topic in consumer['topics']: cons = consumer['name'] value = graphite_prefix", "message sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__)", "= '%s %s %d' % (value, consumer['lags'][topic], timestamp) metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port):", "+ cons.replace('.','_') + '.' 
+ topic.replace('.','_') message = '%s %s %d' % (value,", "'/consumersSummary' obj = get_json_object(url) consumers = obj[\"consumers\"] for consumer in consumers: for topic", "+ '.' + topic.replace('.','_') message = '%s %s %d' % (value, consumer['lags'][topic], timestamp)", "for next def get_json_object(url): try: obj = requests.get(url) except requests.exceptions.RequestException as e: print", "value = graphite_prefix + cluster.replace('.','_') + '.' + consumer['type'] + '.' + cons.replace('.','_')", "clusters: url = base_url + '/api/status/' + cluster + '/consumersSummary' obj = get_json_object(url)", "'%s %s %d' % (value, consumer['lags'][topic], timestamp) metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print", "= base_url + '/api/status/' + cluster + '/consumersSummary' obj = get_json_object(url) consumers =", "# -*- coding: utf-8 -*- import sys import requests import ConfigParser import time", "print 'sending message:\\n%s' % message sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main():", "'.' + topic.replace('.','_') message = '%s %s %d' % (value, consumer['lags'][topic], timestamp) metrics.append(message)", "in consumer['topics']: cons = consumer['name'] value = graphite_prefix + cluster.replace('.','_') + '.' +", "= requests.get(url) except requests.exceptions.RequestException as e: print e sys.exit(1) return obj.json() # get", "metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' % message sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message)", "consumers = obj[\"consumers\"] for consumer in consumers: for topic in consumer['topics']: cons =", "cluster.replace('.','_') + '.' + consumer['type'] + '.' + cons.replace('.','_') + '.' 
+ topic.replace('.','_')", "return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics = [] timestamp = int(time.time())", "in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics = []", "topic.replace('.','_') message = '%s %s %d' % (value, consumer['lags'][topic], timestamp) metrics.append(message) return metrics", "ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url = config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n'", "# get all exist clusters for url concatenate in future def get_clusters(base_url): obj", "'/consumer_lag.ini') base_url = config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' . join(bulk) +", "clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics = [] timestamp = int(time.time()) for", "base_url = config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' . 
join(bulk) + '\\n'", "url concatenate in future def get_clusters(base_url): obj = get_json_object(base_url + '/api/status/clusters') clusters =", "= socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini')", "object for next def get_json_object(url): try: obj = requests.get(url) except requests.exceptions.RequestException as e:", "next def get_json_object(url): try: obj = requests.get(url) except requests.exceptions.RequestException as e: print e", "except requests.exceptions.RequestException as e: print e sys.exit(1) return obj.json() # get all exist", "obj = get_json_object(url) consumers = obj[\"consumers\"] for consumer in consumers: for topic in", "= [] for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters =", "socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url", "base json object for next def get_json_object(url): try: obj = requests.get(url) except requests.exceptions.RequestException", "import sys import requests import ConfigParser import time import socket import os #", "cluster in clusters: url = base_url + '/api/status/' + cluster + '/consumersSummary' obj", "timestamp) metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' % message sock =", "= config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' . join(bulk) + '\\n' send_graphite_metrics(message,config.get('graphite','host'),int(config.get('graphite','port')))", "time import socket import os # get base json object for next def", "'.' 
+ cons.replace('.','_') + '.' + topic.replace('.','_') message = '%s %s %d' %", "+ '/api/status/clusters') clusters = [] for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def", "sock.sendall(message) sock.close() def main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url = config.get('api','url')", "+ cluster + '/consumersSummary' obj = get_json_object(url) consumers = obj[\"consumers\"] for consumer in", "import socket import os # get base json object for next def get_json_object(url):", "prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics = [] timestamp = int(time.time()) for cluster in", "+ '.' + consumer['type'] + '.' + cons.replace('.','_') + '.' + topic.replace('.','_') message", "os # get base json object for next def get_json_object(url): try: obj =", "bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' . join(bulk) + '\\n' send_graphite_metrics(message,config.get('graphite','host'),int(config.get('graphite','port'))) if __name__", "send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' % message sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def", "clusters = [] for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters", "consumers: for topic in consumer['topics']: cons = consumer['name'] value = graphite_prefix + cluster.replace('.','_')", "= consumer['name'] value = graphite_prefix + cluster.replace('.','_') + '.' 
+ consumer['type'] + '.'", "def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' % message sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close()", "= get_clusters(base_url) metrics = [] timestamp = int(time.time()) for cluster in clusters: url", "import ConfigParser import time import socket import os # get base json object", "+ consumer['type'] + '.' + cons.replace('.','_') + '.' + topic.replace('.','_') message = '%s", "get_clusters(base_url) metrics = [] timestamp = int(time.time()) for cluster in clusters: url =", "for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics", "sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url =", "base_url + '/api/status/' + cluster + '/consumersSummary' obj = get_json_object(url) consumers = obj[\"consumers\"]", "+ cluster.replace('.','_') + '.' + consumer['type'] + '.' + cons.replace('.','_') + '.' +", "cluster + '/consumersSummary' obj = get_json_object(url) consumers = obj[\"consumers\"] for consumer in consumers:", "as e: print e sys.exit(1) return obj.json() # get all exist clusters for", "get base json object for next def get_json_object(url): try: obj = requests.get(url) except", "import requests import ConfigParser import time import socket import os # get base", "consumer['topics']: cons = consumer['name'] value = graphite_prefix + cluster.replace('.','_') + '.' 
+ consumer['type']", "%s %d' % (value, consumer['lags'][topic], timestamp) metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending", "config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' . join(bulk) + '\\n' send_graphite_metrics(message,config.get('graphite','host'),int(config.get('graphite','port'))) if", "socket import os # get base json object for next def get_json_object(url): try:", "print e sys.exit(1) return obj.json() # get all exist clusters for url concatenate", "import time import socket import os # get base json object for next", "el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics =", "metrics = [] timestamp = int(time.time()) for cluster in clusters: url = base_url", "= graphite_prefix + cluster.replace('.','_') + '.' + consumer['type'] + '.' 
+ cons.replace('.','_') +", "main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url = config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix'))", "% message sock = socket.socket() sock.connect((graphite_host,graphite_port)) sock.sendall(message) sock.close() def main(): config = ConfigParser.SafeConfigParser()", "in consumers: for topic in consumer['topics']: cons = consumer['name'] value = graphite_prefix +", "json object for next def get_json_object(url): try: obj = requests.get(url) except requests.exceptions.RequestException as", "sock.close() def main(): config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(__file__) + '/consumer_lag.ini') base_url = config.get('api','url') bulk", "(value, consumer['lags'][topic], timestamp) metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' % message", "+ '/consumer_lag.ini') base_url = config.get('api','url') bulk = prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' . 
join(bulk)", "message = '%s %s %d' % (value, consumer['lags'][topic], timestamp) metrics.append(message) return metrics def", "obj.json() # get all exist clusters for url concatenate in future def get_clusters(base_url):", "= get_json_object(base_url + '/api/status/clusters') clusters = [] for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return", "= get_json_object(url) consumers = obj[\"consumers\"] for consumer in consumers: for topic in consumer['topics']:", "for consumer in consumers: for topic in consumer['topics']: cons = consumer['name'] value =", "[] timestamp = int(time.time()) for cluster in clusters: url = base_url + '/api/status/'", "metrics.append(message) return metrics def send_graphite_metrics(message,graphite_host,graphite_port): print 'sending message:\\n%s' % message sock = socket.socket()", "= prepare_graphite_metrics(base_url,config.get('graphite','prefix')) message = '\\n' . join(bulk) + '\\n' send_graphite_metrics(message,config.get('graphite','host'),int(config.get('graphite','port'))) if __name__ ==", "-*- coding: utf-8 -*- import sys import requests import ConfigParser import time import", "= int(time.time()) for cluster in clusters: url = base_url + '/api/status/' + cluster", "+ '/api/status/' + cluster + '/consumersSummary' obj = get_json_object(url) consumers = obj[\"consumers\"] for", "clusters for url concatenate in future def get_clusters(base_url): obj = get_json_object(base_url + '/api/status/clusters')", "future def get_clusters(base_url): obj = get_json_object(base_url + '/api/status/clusters') clusters = [] for el", "graphite_prefix + cluster.replace('.','_') + '.' + consumer['type'] + '.' 
+ cons.replace('.','_') + '.'", "topic in consumer['topics']: cons = consumer['name'] value = graphite_prefix + cluster.replace('.','_') + '.'", "get_json_object(base_url + '/api/status/clusters') clusters = [] for el in obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters", "obj = requests.get(url) except requests.exceptions.RequestException as e: print e sys.exit(1) return obj.json() #", "obj[\"clusters\"][\"active\"]: clusters.append(el[\"name\"]) return clusters def prepare_graphite_metrics(base_url,graphite_prefix): clusters = get_clusters(base_url) metrics = [] timestamp", "consumer['name'] value = graphite_prefix + cluster.replace('.','_') + '.' + consumer['type'] + '.' +" ]
[ "ignore the variable else: env_pkgs = [] # Add pkgs cmap_pkgs.update(env_pkgs) # Attempt", "def get_cmap(cmap): # Try to obtain the colormap from MPL try: cmap =", "from which colors should\" \" be taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float,", "action='store', type=int) # Set defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND # Add", "# CMAP_TYPE COMMAND # Add cmap_type subparser cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type),", "at semicolons if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') # Else, if UNIX, split variable", "help of this action is required if action.help is not argparse.SUPPRESS: # Check", "and widths of the help texts help_position = min(self._action_max_length+2, self._max_help_position) help_width = max(self._width-help_position,", "from the command-line. \"\"\" # Initialize argparser parser = argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands,", "argparse from importlib import import_module import os import sys # Package imports import", "automatically extracts help strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument function", "remaining desc_lines for line in desc_lines: # Format and add to parts parts.append(\"%s%s\\n\"", "# If Windows, split variable at semicolons if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') #", "the main function of the CLI and is called whenever `cmr` is invoked", "CLI and is called whenever `cmr` is invoked from the command-line. 
\"\"\" #", "# Add subparsers subparsers = parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS # Add", "v{}\".format(__version__)) # Create a cmap parser for several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) #", "sorted(action.choices.keys()) # Loop over all subcommands defined in the action for name in", "in ('int', '8bit'): np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout, colors, '%s') # This function", "colormap from MPL try: cmap = mplcm.get_cmap(cmap) # If this does not work,", "Windows, split variable at semicolons if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') # Else, if", "cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND # Obtain the optional default arguments of take_cmap_colors", "is required if action.help is not argparse.SUPPRESS: # Check if this action is", "of all parts of the description of this subcommand parts = [name, desc_lines.pop(0),", "':' in cmap: # Split cmap up into mod_name and obj_name mod_name, obj_name", "colons elif sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':') # Else, ignore the variable else:", "# Add 'N' argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors to take\", metavar='N', action='store',", "np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout,", "UNIX, split variable at colons elif sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':') # Else,", "'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') # Add 'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt',", "obj_path: cmap = getattr(cmap, obj) # If cmap is still a string, raise", "formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap' argument mk_cmod_parser.add_argument( 'cmap', 
help=\"Name of *CMasher* colormap to", "CMAP_COLORS COMMAND # Obtain the optional default arguments of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__", "Transform name to the proper formatting name = \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name,", "action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') # Add 'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format", "parser = argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add subparsers subparsers =", "import os import sys # Package imports import e13tools as e13 from matplotlib", "Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND # Obtain the optional default", "= argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value range in", "self._split_lines(description, help_width) # Create list of all parts of the description of this", "formatter that automatically extracts help strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the", "of this subcommand and add it self._add_item(self.format_subcommands, [name, subparser.description]) # Call super method", "variable at semicolons if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') # Else, if UNIX, split", "subcommand parts = [name, desc_lines.pop(0), '\\n'] # Loop over all remaining desc_lines for", "over all subcommands defined in the action for name in names: # Obtain", "Initialize argparser parser = argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add subparsers", "names: # Obtain corresponding subparser subparser = action.choices[name] # Format the description of", "in the action 
for name in names: # Obtain corresponding subparser subparser =", "handles the 'cmap_type' subcommand def cli_cmap_type(): # Import cmap packages import_cmap_pkgs() # Print", "the 'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex() # This function handles the 'cmap_type' subcommand", "FUNCTION DEFINITIONS # This function handles the 'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex() #", "handles the 'mkcmod' subcommand def cli_mk_cmod(): # Create cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap,", "%r in %r.\" % (ARGS.cmap, cmap_path)) # %% FUNCTION DEFINITIONS # This function", "not work, try to expand given cmap in setuptools-style except ValueError: # Check", "env_pkgs = env_pkgs.split(';') # Else, if UNIX, split variable at colons elif sys.platform.startswith(('darwin',", "take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized", "provided), show help if 'func' not in ARGS: parser.print_help() # Else, call the", "optional default arguments of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors parser", "this package main_desc = (\"CMasher: Scientific colormaps for making accessible, informative\" \" and", "function def add_argument(self, action): # Check if the help of this action is", "description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND #", "the colormap from MPL try: cmap = mplcm.get_cmap(cmap) # If this does not", "help_width) # Create list of all parts of the description of this subcommand", "cli_cmap_type(): # Import cmap packages import_cmap_pkgs() # Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This", "commandline that module has been created print(\"Created standalone colormap 
module of %r in", "to cmap_pkgs if it is not empty if env_pkgs is not None: #", "this subcommand parts = [name, desc_lines.pop(0), '\\n'] # Loop over all remaining desc_lines", "# Determine the positions and widths of the help texts help_position = min(self._action_max_length+2,", "line-by-line if ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt in ('int',", "Add rgb_table subparser rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB values", "mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap' argument mk_cmod_parser.add_argument( 'cmap',", "argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add subparsers subparsers = parser.add_subparsers( title='commands',", "description of this subcommand parts = [name, desc_lines.pop(0), '\\n'] # Loop over all", "all subcommands defined in the action for name in names: # Obtain corresponding", "help=(\"Name of colormap to use as registered in *matplotlib* or the \" \"object", "on commandline that module has been created print(\"Created standalone colormap module of %r", "isinstance(cmap, str): # Print error and exit print(\"Requested 'CMAP' ({!r}) cannot be found!\".format(cmap))", "the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) #", "line in desc_lines: # Format and add to parts parts.append(\"%s%s\\n\" % (' '*help_position,", "= subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap' argument 
mk_cmod_parser.add_argument( 'cmap', help=\"Name", "Define main description of this package main_desc = (\"CMasher: Scientific colormaps for making", "packages import_cmap_pkgs() # Obtain the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) #", "and 'cmashing' plots\") # %% CLASS DEFINITIONS # Define formatter that automatically extracts", "Add 'version' argument parser.add_argument( '-v', '--version', action='version', version=\"CMasher v{}\".format(__version__)) # Create a cmap", "# This function handles the 'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex() # This function", "should\" \" be taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') # Add", "extracts help strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument function def", "Import the provided module as cmap cmap = import_module(mod_name) # Import the provided", "cmap_pkgs.update(env_pkgs) # Attempt to import each package for cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg)", "type=str) # BIBTEX COMMAND # Add bibtex subparser bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex),", "'8bit', 'str', 'hex'], type=str, dest='return_fmt') # Add cmap_colors subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors',", "parts.append(\"%s%s\\n\" % (' '*help_position, line)) # Convert to a single string and return", "that was requested def get_cmap(cmap): # Try to obtain the colormap from MPL", "ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the colors line-by-line if ARGS.return_fmt in ('float', 'norm'):", "'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'N' 
argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number", "# Check if the help of this action is required if action.help is", "of colors to take\", metavar='N', action='store', type=int) # Set defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors)", "importlib import import_module import os import sys # Package imports import e13tools as", "of the description of this subcommand parts = [name, desc_lines.pop(0), '\\n'] # Loop", "and add to parts parts.append(\"%s%s\\n\" % (' '*help_position, line)) # Convert to a", "# Define set of packages with colormaps cmap_pkgs = {'cmocean', 'colorcet', 'palettable'} #", "take\", metavar='N', action='store', type=int) # Set defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND", "colors, '%.8f') elif ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout, colors,", "from matplotlib import cm as mplcm import numpy as np # CMasher imports", "11) name_width = help_position-self._current_indent-2 # Transform name to the proper formatting name =", "cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg) except ImportError: pass # %% MAIN FUNCTION def", "' if(len(name) <= name_width) else '\\n'+' '*help_position) # Split the lines of the", "is still a string, raise error if isinstance(cmap, str): # Print error and", "MK_CMOD COMMAND # Add mk_cmod subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True)", "add it self._add_item(self.format_subcommands, [name, subparser.description]) # Call super method in all other cases", "# RGB_TABLE COMMAND # Add rgb_table subparser rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser],", "'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap to use as 
registered in *matplotlib*", "sys # Package imports import e13tools as e13 from matplotlib import cm as", "'cmap', help=(\"Name of colormap to use as registered in *matplotlib* or the \"", "module must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod)", "'colorcet', 'palettable'} # Obtain packages from CMR_CMAP_PKGS environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS', None)", "This function handles the 'cmap_type' subcommand def cli_cmap_type(): # Import cmap packages import_cmap_pkgs()", "called whenever `cmr` is invoked from the command-line. \"\"\" # Initialize argparser parser", "rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB values of the provided", "subcommands defined in the action for name in names: # Obtain corresponding subparser", "# Import the provided object from this module for obj in obj_path: cmap", "Add env_pkgs to cmap_pkgs if it is not empty if env_pkgs is not", "type=str) # Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments global ARGS", "in names: # Obtain corresponding subparser subparser = action.choices[name] # Format the description", "help_position-self._current_indent-2 # Transform name to the proper formatting name = \"{0}{1: <{2}}{3}\".format( '", "main_desc = (\"CMasher: Scientific colormaps for making accessible, informative\" \" and 'cmashing' plots\")", "if env_pkgs is not None: # If Windows, split variable at semicolons if", "lines of the subcommand description desc_lines = self._split_lines(description, help_width) # Create list of", "this action is a subparser's action if isinstance(action, argparse._SubParsersAction): # If so, sort", "function handles the 'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex() # This function handles the", "Format and add 
to parts parts.append(\"%s%s\\n\" % (' '*help_position, line)) # Convert to", "getattr(cmap, obj) # If cmap is still a string, raise error if isinstance(cmap,", "parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value", "# Initialize argparser parser = argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add", "# Create a take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional argument", "1) obj_path = obj_name.split('.') # Import the provided module as cmap cmap =", "colormap to create standalone module for\", metavar='CMAP', action='store', type=str) # Add 'dir' optional", "take_colors_parent_parser], description=\"Retrieves the RGB values of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set", "Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments global ARGS ARGS =", "import_module import os import sys # Package imports import e13tools as e13 from", "# Add rgb_table subparser rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB", "in %r.\" % (ARGS.cmap, cmap_path)) # %% FUNCTION DEFINITIONS # This function obtains", "# Transform name to the proper formatting name = \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent,", "to import each package for cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg) except ImportError: pass", "main description of this package main_desc = (\"CMasher: Scientific colormaps for making accessible,", "# Check if this action is a subparser's action if isinstance(action, argparse._SubParsersAction): #", "packages with colormaps def import_cmap_pkgs(): # Define set of packages with 
colormaps cmap_pkgs", "# Add pkgs cmap_pkgs.update(env_pkgs) # Attempt to import each package for cmap_pkg in", "metavar='CMAP', action='store', type=str) # BIBTEX COMMAND # Add bibtex subparser bibtex_parser = subparsers.add_parser(", "take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value range in the colormap from which colors should\" \"", "Add 'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return colors in\", action='store', default=defaults['return_fmt'],", "# BIBTEX COMMAND # Add bibtex subparser bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter,", "mod_name and obj_name mod_name, obj_name = cmap.split(':', 1) obj_path = obj_name.split('.') # Import", "single string and return return(''.join(parts)) # %% COMMAND FUNCTION DEFINITIONS # This function", "def format_subcommands(self, name, description): # Determine the positions and widths of the help", "# %% COMMAND FUNCTION DEFINITIONS # This function handles the 'bibtex' subcommand def", "colors to take\", metavar='N', action='store', type=int) # Set defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) #", "a colon if ':' in cmap: # Split cmap up into mod_name and", "Create a cmap parser for several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap'", "env_pkgs to cmap_pkgs if it is not empty if env_pkgs is not None:", "if 'func' not in ARGS: parser.print_help() # Else, call the corresponding function else:", "in desc_lines: # Format and add to parts parts.append(\"%s%s\\n\" % (' '*help_position, line))", "Add subparsers subparsers = parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS # Add 'version'", "handles the 'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex() # This function handles the 'cmap_type'", "# Add env_pkgs to cmap_pkgs if it is not 
empty if env_pkgs is", "cmap = import_module(mod_name) # Import the provided object from this module for obj", "provided object from this module for obj in obj_path: cmap = getattr(cmap, obj)", "or the \" \"object path of a colormap (e.g., 'a.b:c.d' -> import a.b;", "# This function handles the 'mkcmod' subcommand def cli_mk_cmod(): # Create cmap module", "a subcommand with given name def format_subcommands(self, name, description): # Determine the positions", "def add_argument(self, action): # Check if the help of this action is required", "for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND # Add rgb_table subparser rgb_table_parser = subparsers.add_parser(", "type=str) # Add 'dir' optional argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to directory where", "the colormap from which colors should\" \" be taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2,", "% (ARGS.cmap, cmap_path)) # %% FUNCTION DEFINITIONS # This function obtains the colormap", "cmap is still a string, raise error if isinstance(cmap, str): # Print error", "if isinstance(cmap, str): # Print error and exit print(\"Requested 'CMAP' ({!r}) cannot be", "a cmap parser for several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap' argument", "Check if this action is a subparser's action if isinstance(action, argparse._SubParsersAction): # If", "bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex)", "sort action.choices on name names = sorted(action.choices.keys()) # Loop over all subcommands defined", "'version' argument parser.add_argument( '-v', '--version', action='version', version=\"CMasher v{}\".format(__version__)) # Create a cmap parser", "help texts help_position = min(self._action_max_length+2, 
self._max_help_position) help_width = max(self._width-help_position, 11) name_width = help_position-self._current_indent-2", "import a.b; \" \"cmap = a.b.c.d)\"), metavar='CMAP', action='store', type=str) # BIBTEX COMMAND #", "not empty if env_pkgs is not None: # If Windows, split variable at", "'a.b:c.d' -> import a.b; \" \"cmap = a.b.c.d)\"), metavar='CMAP', action='store', type=str) # BIBTEX", "over all remaining desc_lines for line in desc_lines: # Format and add to", "= cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on commandline that module has been created print(\"Created", "module for obj in obj_path: cmap = getattr(cmap, obj) # If cmap is", "a string, raise error if isinstance(cmap, str): # Print error and exit print(\"Requested", "env_pkgs = os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs to cmap_pkgs if it is not", "function attempts to import a collection of packages with colormaps def import_cmap_pkgs(): #", "take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'N' argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors", "Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND # Add mk_cmod subparser", "cmap = mplcm.get_cmap(cmap) # If this does not work, try to expand given", "add_help=True, allow_abbrev=True) # Add subparsers subparsers = parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS", "string and return return(''.join(parts)) # %% COMMAND FUNCTION DEFINITIONS # This function handles", "formatting name = \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name, name_width, ' ' if(len(name) <=", "name to the proper formatting name = \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name, name_width,", "import cmasher as cmr # All declaration __all__ = ['main'] # %% GLOBALS", "import a collection of packages 
with colormaps def import_cmap_pkgs(): # Define set of", "to parts parts.append(\"%s%s\\n\" % (' '*help_position, line)) # Convert to a single string", "of packages with colormaps def import_cmap_pkgs(): # Define set of packages with colormaps", "FUNCTION DEFINITIONS # This function obtains the colormap that was requested def get_cmap(cmap):", "'N' argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors to take\", metavar='N', action='store', type=int) #", "%% COMMAND FUNCTION DEFINITIONS # This function handles the 'bibtex' subcommand def cli_bibtex():", "Obtain corresponding subparser subparser = action.choices[name] # Format the description of this subcommand", "of a colormap (e.g., 'a.b:c.d' -> import a.b; \" \"cmap = a.b.c.d)\"), metavar='CMAP',", "except ValueError: # Check if cmap contains a colon if ':' in cmap:", "cm as mplcm import numpy as np # CMasher imports from cmasher import", "= min(self._action_max_length+2, self._max_help_position) help_width = max(self._width-help_position, 11) name_width = help_position-self._current_indent-2 # Transform name", "# If this does not work, try to expand given cmap in setuptools-style", "of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False)", "= \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name, name_width, ' ' if(len(name) <= name_width) else", "cmasher as cmr # All declaration __all__ = ['main'] # %% GLOBALS #", "name, description): # Determine the positions and widths of the help texts help_position", "other cases else: super().add_argument(action) # This function formats the description of a subcommand", "and obj_name mod_name, obj_name = cmap.split(':', 1) obj_path = obj_name.split('.') # Import the", "Add 'cmap_range' optional argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value range in the colormap from", "accessible, informative\" \" 
and 'cmashing' plots\") # %% CLASS DEFINITIONS # Define formatter", "self._add_item(self.format_subcommands, [name, subparser.description]) # Call super method in all other cases else: super().add_argument(action)", "'mkcmod' subcommand def cli_mk_cmod(): # Create cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) #", "of this subcommand parts = [name, desc_lines.pop(0), '\\n'] # Loop over all remaining", "of %r in %r.\" % (ARGS.cmap, cmap_path)) # %% FUNCTION DEFINITIONS # This", "as cmap cmap = import_module(mod_name) # Import the provided object from this module", "of a subcommand with given name def format_subcommands(self, name, description): # Determine the", "ValueError: # Check if cmap contains a colon if ':' in cmap: #", "mk_cmod_parser.add_argument( 'cmap', help=\"Name of *CMasher* colormap to create standalone module for\", metavar='CMAP', action='store',", "for making accessible, informative\" \" and 'cmashing' plots\") # %% CLASS DEFINITIONS #", "'%.8f') elif ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout, colors, '%s')", "subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap' argument mk_cmod_parser.add_argument( 'cmap', help=\"Name of", "create standalone module for\", metavar='CMAP', action='store', type=str) # Add 'dir' optional argument mk_cmod_parser.add_argument(", "Format the description of this subcommand and add it self._add_item(self.format_subcommands, [name, subparser.description]) #", "must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) #", "This function handles the 'take_cmap_colors' subcommand def cli_cmap_colors(): # Import cmap packages import_cmap_pkgs()", "# Add 'version' argument parser.add_argument( '-v', '--version', 
action='version', version=\"CMasher v{}\".format(__version__)) # Create a", "numpy as np # CMasher imports from cmasher import __version__ import cmasher as", "' '*self._current_indent, name, name_width, ' ' if(len(name) <= name_width) else '\\n'+' '*help_position) #", "widths of the help texts help_position = min(self._action_max_length+2, self._max_help_position) help_width = max(self._width-help_position, 11)", "take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return colors in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit',", "Set defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND # Add rgb_table subparser rgb_table_parser", "e13 from matplotlib import cm as mplcm import numpy as np # CMasher", "(\"CMasher: Scientific colormaps for making accessible, informative\" \" and 'cmashing' plots\") # %%", "cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles the 'take_cmap_colors' subcommand def cli_cmap_colors(): #", "argparse._SubParsersAction): # If so, sort action.choices on name names = sorted(action.choices.keys()) # Loop", "still a string, raise error if isinstance(cmap, str): # Print error and exit", "positions and widths of the help texts help_position = min(self._action_max_length+2, self._max_help_position) help_width =", "the colormap that was requested def get_cmap(cmap): # Try to obtain the colormap", "action for name in names: # Obtain corresponding subparser subparser = action.choices[name] #", "\" and 'cmashing' plots\") # %% CLASS DEFINITIONS # Define formatter that automatically", "the variable else: env_pkgs = [] # Add pkgs cmap_pkgs.update(env_pkgs) # Attempt to", "cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on commandline that module has been created", "Package imports import e13tools as e13 from matplotlib import cm as mplcm import", "action): # Check if the help of this action is 
required if action.help", "the provided object from this module for obj in obj_path: cmap = getattr(cmap,", "argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap to use as registered in *matplotlib* or", "subparser = action.choices[name] # Format the description of this subcommand and add it", "# Loop over all subcommands defined in the action for name in names:", "desc_lines = self._split_lines(description, help_width) # Create list of all parts of the description", "defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND # Obtain the optional default arguments", "and add it self._add_item(self.format_subcommands, [name, subparser.description]) # Call super method in all other", "(e.g., 'a.b:c.d' -> import a.b; \" \"cmap = a.b.c.d)\"), metavar='CMAP', action='store', type=str) #", "names = sorted(action.choices.keys()) # Loop over all subcommands defined in the action for", "if ':' in cmap: # Split cmap up into mod_name and obj_name mod_name,", "'--range', help=(\"Normalized value range in the colormap from which colors should\" \" be", "description desc_lines = self._split_lines(description, help_width) # Create list of all parts of the", "# Else, ignore the variable else: env_pkgs = [] # Add pkgs cmap_pkgs.update(env_pkgs)", "'--fmt', help=\"Format to return colors in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit', 'str',", "'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex() # This function handles the 'cmap_type' subcommand def", "add_argument function def add_argument(self, action): # Check if the help of this action", "cli_bibtex(): cmr.get_bibtex() # This function handles the 'cmap_type' subcommand def cli_cmap_type(): # Import", "cmap parser for several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap' argument cmap_parent_parser.add_argument(", "# Built-in imports import argparse from importlib import 
import_module import os import sys", "parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB values of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) #", "a single string and return return(''.join(parts)) # %% COMMAND FUNCTION DEFINITIONS # This", "('int', '8bit'): np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout, colors, '%s') # This function handles", "'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND", "to return colors in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit', 'str', 'hex'], type=str,", "the RGB values of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for", "so, sort action.choices on name names = sorted(action.choices.keys()) # Loop over all subcommands", "'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout, colors, '%i') else:", "if cmap contains a colon if ':' in cmap: # Split cmap up", "`cmr` is invoked from the command-line. 
\"\"\" # Initialize argparser parser = argparse.ArgumentParser(", "taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') # Add 'fmt' optional argument", "is not empty if env_pkgs is not None: # If Windows, split variable", "import import_module import os import sys # Package imports import e13tools as e13", "= getattr(cmap, obj) # If cmap is still a string, raise error if", "ncolors=None) # MK_CMOD COMMAND # Add mk_cmod subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod),", "IMPORTS # Built-in imports import argparse from importlib import import_module import os import", "else '\\n'+' '*help_position) # Split the lines of the subcommand description desc_lines =", "(ARGS.cmap, cmap_path)) # %% FUNCTION DEFINITIONS # This function obtains the colormap that", "DEFINITIONS # This function handles the 'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex() # This", "subparsers subparsers = parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS # Add 'version' argument", "rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND # Add mk_cmod subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod',", "class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument function def add_argument(self, action): # Check if", "it self._add_item(self.format_subcommands, [name, subparser.description]) # Call super method in all other cases else:", "super().add_argument(action) # This function formats the description of a subcommand with given name", "CLASS DEFINITIONS # Define formatter that automatically extracts help strings of subcommands class", "# Return cmap return(cmap) # This function attempts to import a collection of", "choices=['float', 'norm', 'int', '8bit', 'str', 'hex'], type=str, dest='return_fmt') # Add cmap_colors subparser cmap_colors_parser", 
"cmr # All declaration __all__ = ['main'] # %% GLOBALS # Define main", "'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap' argument mk_cmod_parser.add_argument( 'cmap', help=\"Name of *CMasher*", "to use as registered in *matplotlib* or the \" \"object path of a", "for cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg) except ImportError: pass # %% MAIN FUNCTION", "empty if env_pkgs is not None: # If Windows, split variable at semicolons", "# Convert to a single string and return return(''.join(parts)) # %% COMMAND FUNCTION", "line)) # Convert to a single string and return return(''.join(parts)) # %% COMMAND", "range in the colormap from which colors should\" \" be taken\"), metavar=('LOWER', 'UPPER'),", "<filename>cmasher/cli_tools.py # -*- coding: utf-8 -*- # %% IMPORTS # Built-in imports import", "'str', 'hex'], type=str, dest='return_fmt') # Add cmap_colors subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser,", "'--dir', help=\"Path to directory where the module must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str)", "cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on commandline that module has been created print(\"Created standalone", "the command-line. 
\"\"\" # Initialize argparser parser = argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True,", "defined in the action for name in names: # Obtain corresponding subparser subparser", "is not argparse.SUPPRESS: # Check if this action is a subparser's action if", "a subparser's action if isinstance(action, argparse._SubParsersAction): # If so, sort action.choices on name", "Create a take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional argument take_colors_parent_parser.add_argument(", "arguments is empty (no func was provided), show help if 'func' not in", "# Add mk_cmod subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add", "colors line-by-line if ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt in", "<= name_width) else '\\n'+' '*help_position) # Split the lines of the subcommand description", "provided module as cmap cmap = import_module(mod_name) # Import the provided object from", "cmap_pkgs = {'cmocean', 'colorcet', 'palettable'} # Obtain packages from CMR_CMAP_PKGS environment variable env_pkgs", "# Add 'cmap' argument mk_cmod_parser.add_argument( 'cmap', help=\"Name of *CMasher* colormap to create standalone", "'CMAP' ({!r}) cannot be found!\".format(cmap)) sys.exit() # Return cmap return(cmap) # This function", "'cmap_type' subcommand def cli_cmap_type(): # Import cmap packages import_cmap_pkgs() # Print cmap type", "of the subcommand description desc_lines = self._split_lines(description, help_width) # Create list of all", "action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments", "def cli_cmap_colors(): # Import cmap packages 
import_cmap_pkgs() # Obtain the colors colors =", "is empty (no func was provided), show help if 'func' not in ARGS:", "desc_lines.pop(0), '\\n'] # Loop over all remaining desc_lines for line in desc_lines: #", "try: cmap = mplcm.get_cmap(cmap) # If this does not work, try to expand", "max(self._width-help_position, 11) name_width = help_position-self._current_indent-2 # Transform name to the proper formatting name", "argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value range in the", "Check if cmap contains a colon if ':' in cmap: # Split cmap", "# Define main description of this package main_desc = (\"CMasher: Scientific colormaps for", "to a single string and return return(''.join(parts)) # %% COMMAND FUNCTION DEFINITIONS #", "CMasher imports from cmasher import __version__ import cmasher as cmr # All declaration", "colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the colors line-by-line if ARGS.return_fmt", "cmap = getattr(cmap, obj) # If cmap is still a string, raise error", "'cmap' argument mk_cmod_parser.add_argument( 'cmap', help=\"Name of *CMasher* colormap to create standalone module for\",", "description of this package main_desc = (\"CMasher: Scientific colormaps for making accessible, informative\"", "the help texts help_position = min(self._action_max_length+2, self._max_help_position) help_width = max(self._width-help_position, 11) name_width =", "the 'cmap_type' subcommand def cli_cmap_type(): # Import cmap packages import_cmap_pkgs() # Print cmap", "Add pkgs cmap_pkgs.update(env_pkgs) # Attempt to import each package for cmap_pkg in cmap_pkgs:", "# All declaration __all__ = ['main'] # %% GLOBALS # Define main description", "*CMasher* colormap to create standalone module for\", metavar='CMAP', action='store', type=str) # Add 'dir'", "cmap: # Split cmap up into mod_name and 
obj_name mod_name, obj_name = cmap.split(':',", "'%s') # This function handles the 'mkcmod' subcommand def cli_mk_cmod(): # Create cmap", "Add 'dir' optional argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to directory where the module", "of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None)", "# Print error and exit print(\"Requested 'CMAP' ({!r}) cannot be found!\".format(cmap)) sys.exit() #", "cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional", "Add 'N' argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors to take\", metavar='N', action='store', type=int)", "function of the CLI and is called whenever `cmr` is invoked from the", "# Add bibtex subparser bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set", "DEFINITIONS # This function obtains the colormap that was requested def get_cmap(cmap): #", "= parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS # Add 'version' argument parser.add_argument( '-v',", "<{2}}{3}\".format( ' '*self._current_indent, name, name_width, ' ' if(len(name) <= name_width) else '\\n'+' '*help_position)", "*matplotlib* or the \" \"object path of a colormap (e.g., 'a.b:c.d' -> import", "ARGS = parser.parse_args() # If arguments is empty (no func was provided), show", "= os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs to cmap_pkgs if it is not empty", "name, name_width, ' ' if(len(name) <= name_width) else '\\n'+' '*help_position) # Split the", "the 'take_cmap_colors' subcommand def cli_cmap_colors(): # Import cmap packages import_cmap_pkgs() # Obtain the", "split variable at colons elif 
sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':') # Else, ignore", "if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') # Else, if UNIX, split variable at colons", "subcommand def cli_bibtex(): cmr.get_bibtex() # This function handles the 'cmap_type' subcommand def cli_cmap_type():", "HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument function def add_argument(self, action): # Check if the", "colormaps cmap_pkgs = {'cmocean', 'colorcet', 'palettable'} # Obtain packages from CMR_CMAP_PKGS environment variable", "'\\n'] # Loop over all remaining desc_lines for line in desc_lines: # Format", "Add cmap_type subparser cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set", "COMMAND # Add rgb_table subparser rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the", "to import a collection of packages with colormaps def import_cmap_pkgs(): # Define set", "of this action is required if action.help is not argparse.SUPPRESS: # Check if", "into mod_name and obj_name mod_name, obj_name = cmap.split(':', 1) obj_path = obj_name.split('.') #", "expand given cmap in setuptools-style except ValueError: # Check if cmap contains a", "'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS", "pass # %% MAIN FUNCTION def main(): \"\"\" This is the main function", "error if isinstance(cmap, str): # Print error and exit print(\"Requested 'CMAP' ({!r}) cannot", "= {'cmocean', 'colorcet', 'palettable'} # Obtain packages from CMR_CMAP_PKGS environment variable env_pkgs 
=", "Import cmap packages import_cmap_pkgs() # Obtain the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range,", "else: env_pkgs = [] # Add pkgs cmap_pkgs.update(env_pkgs) # Attempt to import each", "where the module must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults for", "= a.b.c.d)\"), metavar='CMAP', action='store', type=str) # BIBTEX COMMAND # Add bibtex subparser bibtex_parser", "help strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument function def add_argument(self,", "action.choices on name names = sorted(action.choices.keys()) # Loop over all subcommands defined in", "subparser.description]) # Call super method in all other cases else: super().add_argument(action) # This", "the proper formatting name = \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name, name_width, ' '", "np.savetxt(sys.stdout, colors, '%s') # This function handles the 'mkcmod' subcommand def cli_mk_cmod(): #", "# Create cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on commandline that", "If cmap is still a string, raise error if isinstance(cmap, str): # Print", "is not None: # If Windows, split variable at semicolons if sys.platform.startswith('win'): env_pkgs", "invoked from the command-line. 
\"\"\" # Initialize argparser parser = argparse.ArgumentParser( 'cmr', description=main_desc,", "action.help is not argparse.SUPPRESS: # Check if this action is a subparser's action", "This function obtains the colormap that was requested def get_cmap(cmap): # Try to", "Add mk_cmod subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap'", "parts of the description of this subcommand parts = [name, desc_lines.pop(0), '\\n'] #", "= sorted(action.choices.keys()) # Loop over all subcommands defined in the action for name", "Convert to a single string and return return(''.join(parts)) # %% COMMAND FUNCTION DEFINITIONS", "show help if 'func' not in ARGS: parser.print_help() # Else, call the corresponding", "been created print(\"Created standalone colormap module of %r in %r.\" % (ARGS.cmap, cmap_path))", "for name in names: # Obtain corresponding subparser subparser = action.choices[name] # Format", "# Set defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND # Add rgb_table subparser", "mplcm.get_cmap(cmap) # If this does not work, try to expand given cmap in", "packages with colormaps cmap_pkgs = {'cmocean', 'colorcet', 'palettable'} # Obtain packages from CMR_CMAP_PKGS", "\"object path of a colormap (e.g., 'a.b:c.d' -> import a.b; \" \"cmap =", "= import_module(mod_name) # Import the provided object from this module for obj in", "env_pkgs = [] # Add pkgs cmap_pkgs.update(env_pkgs) # Attempt to import each package", "import __version__ import cmasher as cmr # All declaration __all__ = ['main'] #", "# Create a cmap parser for several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add", "env_pkgs = env_pkgs.split(':') # Else, ignore the variable else: env_pkgs = [] #", "utf-8 -*- # %% IMPORTS # Built-in imports import argparse from importlib import", "return 
return(''.join(parts)) # %% COMMAND FUNCTION DEFINITIONS # This function handles the 'bibtex'", "up into mod_name and obj_name mod_name, obj_name = cmap.split(':', 1) obj_path = obj_name.split('.')", "# Import cmap packages import_cmap_pkgs() # Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function", "handles the 'take_cmap_colors' subcommand def cli_cmap_colors(): # Import cmap packages import_cmap_pkgs() # Obtain", "Obtain the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the colors", "= (\"CMasher: Scientific colormaps for making accessible, informative\" \" and 'cmashing' plots\") #", "subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) #", "name_width, ' ' if(len(name) <= name_width) else '\\n'+' '*help_position) # Split the lines", "packages import_cmap_pkgs() # Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles the 'take_cmap_colors'", "# This function obtains the colormap that was requested def get_cmap(cmap): # Try", "def main(): \"\"\" This is the main function of the CLI and is", "if the help of this action is required if action.help is not argparse.SUPPRESS:", "default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments global", "a.b; \" \"cmap = a.b.c.d)\"), metavar='CMAP', action='store', type=str) # BIBTEX COMMAND # Add", "the description of this subcommand and add it self._add_item(self.format_subcommands, [name, subparser.description]) # Call", "= cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # 
Add 'cmap_range'", "subcommand def cli_cmap_colors(): # Import cmap packages import_cmap_pkgs() # Obtain the colors colors", "subcommand def cli_mk_cmod(): # Create cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print", "\" \"object path of a colormap (e.g., 'a.b:c.d' -> import a.b; \" \"cmap", "does not work, try to expand given cmap in setuptools-style except ValueError: #", "parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS # Add 'version' argument parser.add_argument( '-v', '--version',", "of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument function def add_argument(self, action): #", "[name, desc_lines.pop(0), '\\n'] # Loop over all remaining desc_lines for line in desc_lines:", "defaults = cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # Add", "cmap_colors subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add", "plots\") # %% CLASS DEFINITIONS # Define formatter that automatically extracts help strings", "to expand given cmap in setuptools-style except ValueError: # Check if cmap contains", "cannot be found!\".format(cmap)) sys.exit() # Return cmap return(cmap) # This function attempts to", "MAIN FUNCTION def main(): \"\"\" This is the main function of the CLI", "# Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND # Add cmap_type subparser", "type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles the 'take_cmap_colors' subcommand def cli_cmap_colors(): # Import", "except ImportError: pass # %% MAIN FUNCTION def main(): \"\"\" This is the", "= subparsers.add_parser( 'cmcolors', 
parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'N' argument cmap_colors_parser.add_argument(", "name def format_subcommands(self, name, description): # Determine the positions and widths of the", "# Add 'dir' optional argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to directory where the", "of packages with colormaps cmap_pkgs = {'cmocean', 'colorcet', 'palettable'} # Obtain packages from", "method in all other cases else: super().add_argument(action) # This function formats the description", "in ('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout, colors,", "path of a colormap (e.g., 'a.b:c.d' -> import a.b; \" \"cmap = a.b.c.d)\"),", "directory where the module must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults", "({!r}) cannot be found!\".format(cmap)) sys.exit() # Return cmap return(cmap) # This function attempts", "subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'N' argument cmap_colors_parser.add_argument( 'ncolors',", "# Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND # Obtain the optional", "was requested def get_cmap(cmap): # Try to obtain the colormap from MPL try:", "mod_name, obj_name = cmap.split(':', 1) obj_path = obj_name.split('.') # Import the provided module", "rgb_table subparser rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB values of", "'-v', '--version', action='version', version=\"CMasher v{}\".format(__version__)) # Create a cmap parser 
for several commands", "function handles the 'cmap_type' subcommand def cli_cmap_type(): # Import cmap packages import_cmap_pkgs() #", "# This function handles the 'take_cmap_colors' subcommand def cli_cmap_colors(): # Import cmap packages", "colon if ':' in cmap: # Split cmap up into mod_name and obj_name", "env_pkgs.split(';') # Else, if UNIX, split variable at colons elif sys.platform.startswith(('darwin', 'linux')): env_pkgs", "command-line. \"\"\" # Initialize argparser parser = argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True)", "colors, '%s') # This function handles the 'mkcmod' subcommand def cli_mk_cmod(): # Create", "'norm', 'int', '8bit', 'str', 'hex'], type=str, dest='return_fmt') # Add cmap_colors subparser cmap_colors_parser =", "values of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors,", "COMMAND # Add mk_cmod subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) #", "standalone module for\", metavar='CMAP', action='store', type=str) # Add 'dir' optional argument mk_cmod_parser.add_argument( '-d',", "a collection of packages with colormaps def import_cmap_pkgs(): # Define set of packages", "cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND # Add rgb_table subparser rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser,", "If so, sort action.choices on name names = sorted(action.choices.keys()) # Loop over all", "environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs to cmap_pkgs if it", "metavar='CMAP', action='store', type=str) # Add 'dir' optional argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to", "this 
subcommand and add it self._add_item(self.format_subcommands, [name, subparser.description]) # Call super method in", "help=\"Name of *CMasher* colormap to create standalone module for\", metavar='CMAP', action='store', type=str) #", "cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND # Obtain the optional default arguments of take_cmap_colors defaults", "None) # Add env_pkgs to cmap_pkgs if it is not empty if env_pkgs", "= [] # Add pkgs cmap_pkgs.update(env_pkgs) # Attempt to import each package for", "return(''.join(parts)) # %% COMMAND FUNCTION DEFINITIONS # This function handles the 'bibtex' subcommand", "' ' if(len(name) <= name_width) else '\\n'+' '*help_position) # Split the lines of", "colors, '%i') else: np.savetxt(sys.stdout, colors, '%s') # This function handles the 'mkcmod' subcommand", "cases else: super().add_argument(action) # This function formats the description of a subcommand with", "sys.exit() # Return cmap return(cmap) # This function attempts to import a collection", "a take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional argument take_colors_parent_parser.add_argument( '--range',", "mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments global ARGS ARGS = parser.parse_args() # If", "split variable at semicolons if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') # Else, if UNIX,", "= parser.parse_args() # If arguments is empty (no func was provided), show help", "error and exit print(\"Requested 'CMAP' ({!r}) cannot be found!\".format(cmap)) sys.exit() # Return cmap", "np # CMasher imports from cmasher import __version__ import cmasher as cmr #", "argument parser.add_argument( '-v', '--version', action='version', version=\"CMasher v{}\".format(__version__)) # Create a cmap parser for", "elif ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout, colors, '%s') #", "cmap cmap 
= import_module(mod_name) # Import the provided object from this module for", "declaration __all__ = ['main'] # %% GLOBALS # Define main description of this", "from this module for obj in obj_path: cmap = getattr(cmap, obj) # If", "bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND # Add cmap_type subparser cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser],", "help=(\"Normalized value range in the colormap from which colors should\" \" be taken\"),", "'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add subparsers subparsers = parser.add_subparsers( title='commands', metavar='COMMAND')", "type=int) # Set defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND # Add rgb_table", "a colormap (e.g., 'a.b:c.d' -> import a.b; \" \"cmap = a.b.c.d)\"), metavar='CMAP', action='store',", "the description of a subcommand with given name def format_subcommands(self, name, description): #", "corresponding subparser subparser = action.choices[name] # Format the description of this subcommand and", "given cmap in setuptools-style except ValueError: # Check if cmap contains a colon", "set of packages with colormaps cmap_pkgs = {'cmocean', 'colorcet', 'palettable'} # Obtain packages", "from CMR_CMAP_PKGS environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs to cmap_pkgs", "# Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles the 'take_cmap_colors' subcommand def", "import cm as mplcm import numpy as np # CMasher imports from cmasher", "def cli_bibtex(): cmr.get_bibtex() # This function handles the 'cmap_type' subcommand def cli_cmap_type(): #", "FUNCTION def main(): \"\"\" This is the main function of the CLI and", "allow_abbrev=True) # Add subparsers subparsers = parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS #", "# -*- coding: 
utf-8 -*- # %% IMPORTS # Built-in imports import argparse", "as mplcm import numpy as np # CMasher imports from cmasher import __version__", "= cmap.split(':', 1) obj_path = obj_name.split('.') # Import the provided module as cmap", "cmap.split(':', 1) obj_path = obj_name.split('.') # Import the provided module as cmap cmap", "'palettable'} # Obtain packages from CMR_CMAP_PKGS environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS', None) #", "COMMAND # Add cmap_type subparser cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True)", "coding: utf-8 -*- # %% IMPORTS # Built-in imports import argparse from importlib", "-*- # %% IMPORTS # Built-in imports import argparse from importlib import import_module", "of colormap to use as registered in *matplotlib* or the \" \"object path", "setuptools-style except ValueError: # Check if cmap contains a colon if ':' in", "COMMAND FUNCTION DEFINITIONS # This function handles the 'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex()", "Try to obtain the colormap from MPL try: cmap = mplcm.get_cmap(cmap) # If", "on name names = sorted(action.choices.keys()) # Loop over all subcommands defined in the", "the help of this action is required if action.help is not argparse.SUPPRESS: #", "# This function attempts to import a collection of packages with colormaps def", "def import_cmap_pkgs(): # Define set of packages with colormaps cmap_pkgs = {'cmocean', 'colorcet',", "subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument function def add_argument(self, action): # Check", "cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors to take\", metavar='N', action='store', type=int) # Set defaults", "name in names: # Obtain corresponding subparser subparser = action.choices[name] # Format the", 
"description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap' argument mk_cmod_parser.add_argument( 'cmap', help=\"Name of *CMasher* colormap", "description of this subcommand and add it self._add_item(self.format_subcommands, [name, subparser.description]) # Call super", "list of all parts of the description of this subcommand parts = [name,", "try: import_module(cmap_pkg) except ImportError: pass # %% MAIN FUNCTION def main(): \"\"\" This", "given name def format_subcommands(self, name, description): # Determine the positions and widths of", "Check if the help of this action is required if action.help is not", "cmap return(cmap) # This function attempts to import a collection of packages with", "created print(\"Created standalone colormap module of %r in %r.\" % (ARGS.cmap, cmap_path)) #", "the action for name in names: # Obtain corresponding subparser subparser = action.choices[name]", "argparse.ArgumentParser(add_help=False) # Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap to use as", "mk_cmod subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap' argument", "Scientific colormaps for making accessible, informative\" \" and 'cmashing' plots\") # %% CLASS", "cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the colors line-by-line if ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout,", "All declaration __all__ = ['main'] # %% GLOBALS # Define main description of", "'8bit'): np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout, colors, '%s') # This function handles the", "\"\"\" This is the main function of the CLI and is called whenever", "'cmashing' plots\") # %% CLASS DEFINITIONS # Define formatter that automatically extracts help", "Print on commandline that module has been 
created print(\"Created standalone colormap module of", "if(len(name) <= name_width) else '\\n'+' '*help_position) # Split the lines of the subcommand", "action.choices[name] # Format the description of this subcommand and add it self._add_item(self.format_subcommands, [name,", "use as registered in *matplotlib* or the \" \"object path of a colormap", "arguments of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors parser take_colors_parent_parser =", "has been created print(\"Created standalone colormap module of %r in %r.\" % (ARGS.cmap,", "# Obtain the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the", "parser.add_argument( '-v', '--version', action='version', version=\"CMasher v{}\".format(__version__)) # Create a cmap parser for several", "Split the lines of the subcommand description desc_lines = self._split_lines(description, help_width) # Create", "parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND", "cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the colors line-by-line if ARGS.return_fmt in ('float',", "to take\", metavar='N', action='store', type=int) # Set defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE", "nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') # Add 'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to", "in cmap_pkgs: try: import_module(cmap_pkg) except ImportError: pass # %% MAIN FUNCTION def main():", "import_cmap_pkgs(): # Define set of packages with colormaps cmap_pkgs = {'cmocean', 'colorcet', 'palettable'}", "the description of 
this subcommand parts = [name, desc_lines.pop(0), '\\n'] # Loop over", "ARGUMENTS # Add 'version' argument parser.add_argument( '-v', '--version', action='version', version=\"CMasher v{}\".format(__version__)) # Create", "# Try to obtain the colormap from MPL try: cmap = mplcm.get_cmap(cmap) #", "parser.parse_args() # If arguments is empty (no func was provided), show help if", "name names = sorted(action.choices.keys()) # Loop over all subcommands defined in the action", "% (' '*help_position, line)) # Convert to a single string and return return(''.join(parts))", "variable at colons elif sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':') # Else, ignore the", "else: super().add_argument(action) # This function formats the description of a subcommand with given", "found!\".format(cmap)) sys.exit() # Return cmap return(cmap) # This function attempts to import a", "= max(self._width-help_position, 11) name_width = help_position-self._current_indent-2 # Transform name to the proper formatting", "as cmr # All declaration __all__ = ['main'] # %% GLOBALS # Define", "This function handles the 'mkcmod' subcommand def cli_mk_cmod(): # Create cmap module cmap_path", "description of a subcommand with given name def format_subcommands(self, name, description): # Determine", "return colors in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit', 'str', 'hex'], type=str, dest='return_fmt')", "= env_pkgs.split(';') # Else, if UNIX, split variable at colons elif sys.platform.startswith(('darwin', 'linux')):", "os import sys # Package imports import e13tools as e13 from matplotlib import", "was provided), show help if 'func' not in ARGS: parser.print_help() # Else, call", "obtains the colormap that was requested def get_cmap(cmap): # Try to obtain the", "for obj in obj_path: cmap = getattr(cmap, obj) # If cmap is still", "# OPTIONAL ARGUMENTS # Add 'version' argument parser.add_argument( '-v', '--version', 
action='version', version=\"CMasher v{}\".format(__version__))", "contains a colon if ':' in cmap: # Split cmap up into mod_name", "env_pkgs.split(':') # Else, ignore the variable else: env_pkgs = [] # Add pkgs", "def cli_mk_cmod(): # Create cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on", "# Add 'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return colors in\", action='store',", "'take_cmap_colors' subcommand def cli_cmap_colors(): # Import cmap packages import_cmap_pkgs() # Obtain the colors", "# If arguments is empty (no func was provided), show help if 'func'", "all parts of the description of this subcommand parts = [name, desc_lines.pop(0), '\\n']", "in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit', 'str', 'hex'], type=str, dest='return_fmt') # Add", "cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap to use as registered in *matplotlib* or the", "in *matplotlib* or the \" \"object path of a colormap (e.g., 'a.b:c.d' ->", "This function attempts to import a collection of packages with colormaps def import_cmap_pkgs():", "[name, subparser.description]) # Call super method in all other cases else: super().add_argument(action) #", "Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND # Add cmap_type subparser cmap_type_parser", "\"cmap = a.b.c.d)\"), metavar='CMAP', action='store', type=str) # BIBTEX COMMAND # Add bibtex subparser", "cmap_type subparser cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults", "['main'] # %% GLOBALS # Define main description of this package main_desc =", "ImportError: pass # %% MAIN FUNCTION def main(): \"\"\" This is the main", "of this package main_desc = (\"CMasher: Scientific colormaps 
for making accessible, informative\" \"", "= env_pkgs.split(':') # Else, ignore the variable else: env_pkgs = [] # Add", "matplotlib import cm as mplcm import numpy as np # CMasher imports from", "subparser's action if isinstance(action, argparse._SubParsersAction): # If so, sort action.choices on name names", "# Define formatter that automatically extracts help strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): #", "# Attempt to import each package for cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg) except", "cmr.get_bibtex() # This function handles the 'cmap_type' subcommand def cli_cmap_type(): # Import cmap", "if ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt in ('int', '8bit'):", "CMAP_TYPE COMMAND # Add cmap_type subparser cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter,", "help_width = max(self._width-help_position, 11) name_width = help_position-self._current_indent-2 # Transform name to the proper", "making accessible, informative\" \" and 'cmashing' plots\") # %% CLASS DEFINITIONS # Define", "%% GLOBALS # Define main description of this package main_desc = (\"CMasher: Scientific", "(' '*help_position, line)) # Convert to a single string and return return(''.join(parts)) #", "Print error and exit print(\"Requested 'CMAP' ({!r}) cannot be found!\".format(cmap)) sys.exit() # Return", "'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return colors in\", action='store', default=defaults['return_fmt'], choices=['float',", "to obtain the colormap from MPL try: cmap = mplcm.get_cmap(cmap) # If this", "if this action is a subparser's action if isinstance(action, argparse._SubParsersAction): # If so,", "# Split the lines of the subcommand description desc_lines = 
self._split_lines(description, help_width) #", "argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value range in the colormap from which colors should\"", "description): # Determine the positions and widths of the help texts help_position =", "cmap packages import_cmap_pkgs() # Obtain the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt)", "the arguments global ARGS ARGS = parser.parse_args() # If arguments is empty (no", "the positions and widths of the help texts help_position = min(self._action_max_length+2, self._max_help_position) help_width", "for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND # Add mk_cmod subparser mk_cmod_parser =", "type=float, dest='cmap_range') # Add 'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return colors", "of the CLI and is called whenever `cmr` is invoked from the command-line.", "cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on commandline that module has", "this does not work, try to expand given cmap in setuptools-style except ValueError:", "argument mk_cmod_parser.add_argument( 'cmap', help=\"Name of *CMasher* colormap to create standalone module for\", metavar='CMAP',", "this module for obj in obj_path: cmap = getattr(cmap, obj) # If cmap", "import argparse from importlib import import_module import os import sys # Package imports", "cli_mk_cmod(): # Create cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on commandline", "obj_name mod_name, obj_name = cmap.split(':', 1) obj_path = obj_name.split('.') # Import the provided", "RGB_TABLE COMMAND # Add rgb_table subparser rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves", "obj) # If cmap is still a string, raise error if 
isinstance(cmap, str):", "colormap module of %r in %r.\" % (ARGS.cmap, cmap_path)) # %% FUNCTION DEFINITIONS", "colors should\" \" be taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') #", "the lines of the subcommand description desc_lines = self._split_lines(description, help_width) # Create list", "mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments global ARGS ARGS = parser.parse_args() # If arguments", "cmasher import __version__ import cmasher as cmr # All declaration __all__ = ['main']", "package for cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg) except ImportError: pass # %% MAIN", "informative\" \" and 'cmashing' plots\") # %% CLASS DEFINITIONS # Define formatter that", "# Obtain corresponding subparser subparser = action.choices[name] # Format the description of this", "super method in all other cases else: super().add_argument(action) # This function formats the", "for line in desc_lines: # Format and add to parts parts.append(\"%s%s\\n\" % ('", "# Format and add to parts parts.append(\"%s%s\\n\" % (' '*help_position, line)) # Convert", "# Obtain packages from CMR_CMAP_PKGS environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS', None) # Add", "Else, if UNIX, split variable at colons elif sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':')", "metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') # Add 'fmt' optional argument take_colors_parent_parser.add_argument(", "obj_name.split('.') # Import the provided module as cmap cmap = import_module(mod_name) # Import", "sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') # Else, if UNIX, split variable at colons elif", "formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'N' argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors to take\",", "action='store', 
default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit', 'str', 'hex'], type=str, dest='return_fmt') # Add cmap_colors", "min(self._action_max_length+2, self._max_help_position) help_width = max(self._width-help_position, 11) name_width = help_position-self._current_indent-2 # Transform name to", "# Print on commandline that module has been created print(\"Created standalone colormap module", "colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the colors line-by-line if", "-*- coding: utf-8 -*- # %% IMPORTS # Built-in imports import argparse from", "optional argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value range in the colormap from which colors", "subparser bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for bibtex_parser", "COMMAND # Obtain the optional default arguments of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ #", "# Check if cmap contains a colon if ':' in cmap: # Split", "for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments global ARGS ARGS = parser.parse_args() #", "subparser subparser = action.choices[name] # Format the description of this subcommand and add", "raise error if isinstance(cmap, str): # Print error and exit print(\"Requested 'CMAP' ({!r})", "formats the description of a subcommand with given name def format_subcommands(self, name, description):", "each package for cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg) except ImportError: pass # %%", "for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND # Obtain the optional default arguments of", "__version__ import cmasher as cmr # All declaration __all__ = ['main'] # %%", "subparsers.add_parser( 'rgbtable', 
parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB values of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter,", "# Split cmap up into mod_name and obj_name mod_name, obj_name = cmap.split(':', 1)", "empty (no func was provided), show help if 'func' not in ARGS: parser.print_help()", "subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE", "{'cmocean', 'colorcet', 'palettable'} # Obtain packages from CMR_CMAP_PKGS environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS',", "name_width) else '\\n'+' '*help_position) # Split the lines of the subcommand description desc_lines", "several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of", "cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for cmap_type_parser", "global ARGS ARGS = parser.parse_args() # If arguments is empty (no func was", "Import cmap packages import_cmap_pkgs() # Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles", "= help_position-self._current_indent-2 # Transform name to the proper formatting name = \"{0}{1: <{2}}{3}\".format(", "module of %r in %r.\" % (ARGS.cmap, cmap_path)) # %% FUNCTION DEFINITIONS #", "'cmap', help=\"Name of *CMasher* colormap to create standalone module for\", metavar='CMAP', action='store', type=str)", "Built-in imports import argparse from importlib import import_module import os import sys #", "cmap_path)) # %% FUNCTION DEFINITIONS # This function obtains the colormap that was", "variable env_pkgs = 
os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs to cmap_pkgs if it is", "= argparse.ArgumentParser(add_help=False) # Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap to use", "COMMAND # Add bibtex subparser bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) #", "\"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name, name_width, ' ' if(len(name) <= name_width) else '\\n'+'", "for\", metavar='CMAP', action='store', type=str) # Add 'dir' optional argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path", "\" be taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') # Add 'fmt'", "the add_argument function def add_argument(self, action): # Check if the help of this", "This is the main function of the CLI and is called whenever `cmr`", "parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'N' argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of", "be found!\".format(cmap)) sys.exit() # Return cmap return(cmap) # This function attempts to import", "'%i') else: np.savetxt(sys.stdout, colors, '%s') # This function handles the 'mkcmod' subcommand def", "standalone colormap module of %r in %r.\" % (ARGS.cmap, cmap_path)) # %% FUNCTION", "work, try to expand given cmap in setuptools-style except ValueError: # Check if", "print(\"Created standalone colormap module of %r in %r.\" % (ARGS.cmap, cmap_path)) # %%", "# If cmap is still a string, raise error if isinstance(cmap, str): #", "if action.help is not argparse.SUPPRESS: # Check if this action is a subparser's", "save_dir=ARGS.dir) # Print on commandline that module has been created print(\"Created standalone colormap", "in obj_path: cmap 
= getattr(cmap, obj) # If cmap is still a string,", "# MK_CMOD COMMAND # Add mk_cmod subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter,", "# Else, if UNIX, split variable at colons elif sys.platform.startswith(('darwin', 'linux')): env_pkgs =", "with colormaps def import_cmap_pkgs(): # Define set of packages with colormaps cmap_pkgs =", "[] # Add pkgs cmap_pkgs.update(env_pkgs) # Attempt to import each package for cmap_pkg", "# Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND # Add mk_cmod", "defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments global ARGS ARGS = parser.parse_args()", "# Parse the arguments global ARGS ARGS = parser.parse_args() # If arguments is", "import_cmap_pkgs() # Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles the 'take_cmap_colors' subcommand", "= action.choices[name] # Format the description of this subcommand and add it self._add_item(self.format_subcommands,", "Call super method in all other cases else: super().add_argument(action) # This function formats", "%% FUNCTION DEFINITIONS # This function obtains the colormap that was requested def", "Split cmap up into mod_name and obj_name mod_name, obj_name = cmap.split(':', 1) obj_path", "\"\"\" # Initialize argparser parser = argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) #", "'-d', '--dir', help=\"Path to directory where the module must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'],", "# Add cmap_colors subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), 
formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True)", "help=\"Number of colors to take\", metavar='N', action='store', type=int) # Set defaults for cmap_colors_parser", "'*help_position) # Split the lines of the subcommand description desc_lines = self._split_lines(description, help_width)", "bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND # Add cmap_type subparser cmap_type_parser = subparsers.add_parser( 'cmtype',", "self._max_help_position) help_width = max(self._width-help_position, 11) name_width = help_position-self._current_indent-2 # Transform name to the", "default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit', 'str', 'hex'], type=str, dest='return_fmt') # Add cmap_colors subparser", "'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB values of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True)", "defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND # Add mk_cmod subparser mk_cmod_parser", "ARGS ARGS = parser.parse_args() # If arguments is empty (no func was provided),", "'ncolors', help=\"Number of colors to take\", metavar='N', action='store', type=int) # Set defaults for", "function obtains the colormap that was requested def get_cmap(cmap): # Try to obtain", "the colors line-by-line if ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt", "version=\"CMasher v{}\".format(__version__)) # Create a cmap parser for several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False)", "packages from CMR_CMAP_PKGS environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs to", "the \" \"object path of a colormap (e.g., 'a.b:c.d' -> import a.b; \"", "description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, 
add_help=True) # Add 'N' argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors to", "main(): \"\"\" This is the main function of the CLI and is called", "= ['main'] # %% GLOBALS # Define main description of this package main_desc", "description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add subparsers subparsers = parser.add_subparsers( title='commands', metavar='COMMAND') #", "'linux')): env_pkgs = env_pkgs.split(':') # Else, ignore the variable else: env_pkgs = []", "is called whenever `cmr` is invoked from the command-line. \"\"\" # Initialize argparser", "title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS # Add 'version' argument parser.add_argument( '-v', '--version', action='version',", "is a subparser's action if isinstance(action, argparse._SubParsersAction): # If so, sort action.choices on", "add_argument(self, action): # Check if the help of this action is required if", "cmap up into mod_name and obj_name mod_name, obj_name = cmap.split(':', 1) obj_path =", "# If so, sort action.choices on name names = sorted(action.choices.keys()) # Loop over", "__all__ = ['main'] # %% GLOBALS # Define main description of this package", "package main_desc = (\"CMasher: Scientific colormaps for making accessible, informative\" \" and 'cmashing'", "import_cmap_pkgs() # Obtain the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print", "name_width = help_position-self._current_indent-2 # Transform name to the proper formatting name = \"{0}{1:", "strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument function def add_argument(self, action):", "colormap to use as registered in *matplotlib* or the \" \"object path of", "and is called whenever `cmr` is invoked from the command-line. 
\"\"\" # Initialize", "# CMAP_COLORS COMMAND # Obtain the optional default arguments of take_cmap_colors defaults =", "elif sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':') # Else, ignore the variable else: env_pkgs", "Obtain the optional default arguments of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ # Create a", "optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return colors in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm',", "Attempt to import each package for cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg) except ImportError:", "subparser mk_cmod_parser = subparsers.add_parser( 'mkcmod', description=e13.get_main_desc(cmr.create_cmap_mod), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'cmap' argument mk_cmod_parser.add_argument(", "module for\", metavar='CMAP', action='store', type=str) # Add 'dir' optional argument mk_cmod_parser.add_argument( '-d', '--dir',", "defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND # Add cmap_type subparser cmap_type_parser =", "isinstance(action, argparse._SubParsersAction): # If so, sort action.choices on name names = sorted(action.choices.keys()) #", "Else, ignore the variable else: env_pkgs = [] # Add pkgs cmap_pkgs.update(env_pkgs) #", "argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return colors in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm', 'int',", "pkgs cmap_pkgs.update(env_pkgs) # Attempt to import each package for cmap_pkg in cmap_pkgs: try:", "return(cmap) # This function attempts to import a collection of packages with colormaps", "cmap contains a colon if ':' in cmap: # Split cmap up into", "bibtex subparser bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) 
# Set defaults for", "help_position = min(self._action_max_length+2, self._max_help_position) help_width = max(self._width-help_position, 11) name_width = help_position-self._current_indent-2 # Transform", "value range in the colormap from which colors should\" \" be taken\"), metavar=('LOWER',", "colormap that was requested def get_cmap(cmap): # Try to obtain the colormap from", "that automatically extracts help strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override the add_argument", "Return cmap return(cmap) # This function attempts to import a collection of packages", "cmap_pkgs: try: import_module(cmap_pkg) except ImportError: pass # %% MAIN FUNCTION def main(): \"\"\"", "attempts to import a collection of packages with colormaps def import_cmap_pkgs(): # Define", "default=defaults['cmap_range'], type=float, dest='cmap_range') # Add 'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return", "not None: # If Windows, split variable at semicolons if sys.platform.startswith('win'): env_pkgs =", "None: # If Windows, split variable at semicolons if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';')", "add_help=True) # Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND # Add", "required if action.help is not argparse.SUPPRESS: # Check if this action is a", "argparse.SUPPRESS: # Check if this action is a subparser's action if isinstance(action, argparse._SubParsersAction):", "action='store', type=str) # BIBTEX COMMAND # Add bibtex subparser bibtex_parser = subparsers.add_parser( 'bibtex',", "cli_cmap_colors(): # Import cmap packages import_cmap_pkgs() # Obtain the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap),", "\" \"cmap = a.b.c.d)\"), metavar='CMAP', action='store', type=str) # BIBTEX COMMAND # Add bibtex", "# Loop over all remaining desc_lines for line in desc_lines: # Format and", 
"in the colormap from which colors should\" \" be taken\"), metavar=('LOWER', 'UPPER'), action='store',", "colormap from which colors should\" \" be taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'],", "dest='return_fmt') # Add cmap_colors subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter,", "be taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range') # Add 'fmt' optional", "subparser rgb_table_parser = subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB values of the", "to directory where the module must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set", "string, raise error if isinstance(cmap, str): # Print error and exit print(\"Requested 'CMAP'", "action='store', type=str) # Add 'dir' optional argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to directory", "argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to directory where the module must be saved\",", "function handles the 'mkcmod' subcommand def cli_mk_cmod(): # Create cmap module cmap_path =", "registered in *matplotlib* or the \" \"object path of a colormap (e.g., 'a.b:c.d'", "cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND # Add rgb_table subparser rgb_table_parser = subparsers.add_parser( 'rgbtable',", "description=\"Retrieves the RGB values of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults", "desc_lines: # Format and add to parts parts.append(\"%s%s\\n\" % (' '*help_position, line)) #", "the provided module as cmap cmap = import_module(mod_name) # Import 
the provided object", "Define formatter that automatically extracts help strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter): # Override", "If Windows, split variable at semicolons if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') # Else,", "help=\"Format to return colors in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit', 'str', 'hex'],", "rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND # Add mk_cmod subparser mk_cmod_parser = subparsers.add_parser(", "import sys # Package imports import e13tools as e13 from matplotlib import cm", "at colons elif sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':') # Else, ignore the variable", "'func' not in ARGS: parser.print_help() # Else, call the corresponding function else: ARGS.func()", "the subcommand description desc_lines = self._split_lines(description, help_width) # Create list of all parts", "which colors should\" \" be taken\"), metavar=('LOWER', 'UPPER'), action='store', nargs=2, default=defaults['cmap_range'], type=float, dest='cmap_range')", "in cmap: # Split cmap up into mod_name and obj_name mod_name, obj_name =", "# %% IMPORTS # Built-in imports import argparse from importlib import import_module import", "= subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type)", "ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout,", "collection of packages with colormaps def import_cmap_pkgs(): # Define set of packages with", "subparsers = parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL ARGUMENTS # Add 'version' argument 
parser.add_argument(", "# Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the arguments global ARGS ARGS", "from cmasher import __version__ import cmasher as cmr # All declaration __all__ =", "the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the colors line-by-line", "'cmap_range' optional argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value range in the colormap from which", "'int', '8bit', 'str', 'hex'], type=str, dest='return_fmt') # Add cmap_colors subparser cmap_colors_parser = subparsers.add_parser(", "argparser parser = argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add subparsers subparsers", "arguments global ARGS ARGS = parser.parse_args() # If arguments is empty (no func", "# %% CLASS DEFINITIONS # Define formatter that automatically extracts help strings of", "texts help_position = min(self._action_max_length+2, self._max_help_position) help_width = max(self._width-help_position, 11) name_width = help_position-self._current_indent-2 #", "subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'N'", "Loop over all remaining desc_lines for line in desc_lines: # Format and add", "Override the add_argument function def add_argument(self, action): # Check if the help of", "for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND # Add cmap_type subparser cmap_type_parser = subparsers.add_parser(", "# Format the description of this subcommand and add it self._add_item(self.format_subcommands, [name, subparser.description])", "%% CLASS DEFINITIONS # Define formatter that automatically extracts 
help strings of subcommands", "subcommand with given name def format_subcommands(self, name, description): # Determine the positions and", "if isinstance(action, argparse._SubParsersAction): # If so, sort action.choices on name names = sorted(action.choices.keys())", "if UNIX, split variable at colons elif sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':') #", "= argparse.ArgumentParser( 'cmr', description=main_desc, formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add subparsers subparsers = parser.add_subparsers(", "formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND #", "def cli_cmap_type(): # Import cmap packages import_cmap_pkgs() # Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) #", "CMR_CMAP_PKGS environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs to cmap_pkgs if", "as registered in *matplotlib* or the \" \"object path of a colormap (e.g.,", "'hex'], type=str, dest='return_fmt') # Add cmap_colors subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser],", "os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs to cmap_pkgs if it is not empty if", "Import the provided object from this module for obj in obj_path: cmap =", "sys.platform.startswith(('darwin', 'linux')): env_pkgs = env_pkgs.split(':') # Else, ignore the variable else: env_pkgs =", "formatter_class=HelpFormatterWithSubCommands, add_help=True, allow_abbrev=True) # Add subparsers subparsers = parser.add_subparsers( title='commands', metavar='COMMAND') # OPTIONAL", "-> import a.b; \" \"cmap = a.b.c.d)\"), metavar='CMAP', action='store', type=str) # BIBTEX COMMAND", "add_help=True) # Add 'N' argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors to take\", metavar='N',", 
"add_help=True) # Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND # Obtain the", "# Override the add_argument function def add_argument(self, action): # Check if the help", "module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on commandline that module has been", "# %% GLOBALS # Define main description of this package main_desc = (\"CMasher:", "If this does not work, try to expand given cmap in setuptools-style except", "all remaining desc_lines for line in desc_lines: # Format and add to parts", "'\\n'+' '*help_position) # Split the lines of the subcommand description desc_lines = self._split_lines(description,", "in setuptools-style except ValueError: # Check if cmap contains a colon if ':'", "('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout, colors, '%i')", "imports import argparse from importlib import import_module import os import sys # Package", "requested def get_cmap(cmap): # Try to obtain the colormap from MPL try: cmap", "colormaps def import_cmap_pkgs(): # Define set of packages with colormaps cmap_pkgs = {'cmocean',", "if it is not empty if env_pkgs is not None: # If Windows,", "for several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name", "function formats the description of a subcommand with given name def format_subcommands(self, name,", "str): # Print error and exit print(\"Requested 'CMAP' ({!r}) cannot be found!\".format(cmap)) sys.exit()", "commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap", "the CLI and is called whenever `cmr` is invoked from the command-line. 
\"\"\"", "metavar='COMMAND') # OPTIONAL ARGUMENTS # Add 'version' argument parser.add_argument( '-v', '--version', action='version', version=\"CMasher", "'*self._current_indent, name, name_width, ' ' if(len(name) <= name_width) else '\\n'+' '*help_position) # Split", "imports from cmasher import __version__ import cmasher as cmr # All declaration __all__", "this action is required if action.help is not argparse.SUPPRESS: # Check if this", "with given name def format_subcommands(self, name, description): # Determine the positions and widths", "np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout, colors, '%s') # This function handles the 'mkcmod'", "# Add cmap_type subparser cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) #", "and return return(''.join(parts)) # %% COMMAND FUNCTION DEFINITIONS # This function handles the", "# Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap to use as registered", "parts parts.append(\"%s%s\\n\" % (' '*help_position, line)) # Convert to a single string and", "formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for cmap_type_parser cmap_type_parser.set_defaults(func=cli_cmap_type) # CMAP_COLORS COMMAND # Obtain", "= subparsers.add_parser( 'rgbtable', parents=[cmap_parent_parser, take_colors_parent_parser], description=\"Retrieves the RGB values of the provided `cmap`.\",", "Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap to use as registered in", "of *CMasher* colormap to create standalone module for\", metavar='CMAP', action='store', type=str) # Add", "# Create list of all parts of the description of this subcommand parts", "# %% FUNCTION DEFINITIONS # This function obtains the colormap that was requested", "# Add 'cmap_range' optional argument 
take_colors_parent_parser.add_argument( '--range', help=(\"Normalized value range in the colormap", "module as cmap cmap = import_module(mod_name) # Import the provided object from this", "= cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors, cmap_range=ARGS.cmap_range, return_fmt=ARGS.return_fmt) # Print the colors line-by-line if ARGS.return_fmt in", "provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD", "import_module(mod_name) # Import the provided object from this module for obj in obj_path:", "desc_lines for line in desc_lines: # Format and add to parts parts.append(\"%s%s\\n\" %", "Parse the arguments global ARGS ARGS = parser.parse_args() # If arguments is empty", "func was provided), show help if 'func' not in ARGS: parser.print_help() # Else,", "obtain the colormap from MPL try: cmap = mplcm.get_cmap(cmap) # If this does", "= mplcm.get_cmap(cmap) # If this does not work, try to expand given cmap", "`cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for rgb_table_parser rgb_table_parser.set_defaults(func=cli_cmap_colors, ncolors=None) # MK_CMOD COMMAND", "add_help=True) # Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND # Add cmap_type", "Create list of all parts of the description of this subcommand parts =", "description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND #", "Add 'cmap' argument mk_cmod_parser.add_argument( 'cmap', help=\"Name of *CMasher* colormap to create standalone module", "cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap', help=(\"Name of colormap to", "action if 
isinstance(action, argparse._SubParsersAction): # If so, sort action.choices on name names =", "imports import e13tools as e13 from matplotlib import cm as mplcm import numpy", "help=\"Path to directory where the module must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) #", "Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles the 'take_cmap_colors' subcommand def cli_cmap_colors():", "import numpy as np # CMasher imports from cmasher import __version__ import cmasher", "module has been created print(\"Created standalone colormap module of %r in %r.\" %", "and exit print(\"Requested 'CMAP' ({!r}) cannot be found!\".format(cmap)) sys.exit() # Return cmap return(cmap)", "take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors parser take_colors_parent_parser = argparse.ArgumentParser(add_help=False) #", "defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND # Add rgb_table subparser rgb_table_parser =", "default arguments of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors parser take_colors_parent_parser", "Print the colors line-by-line if ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f') elif", "# CMasher imports from cmasher import __version__ import cmasher as cmr # All", "'*help_position, line)) # Convert to a single string and return return(''.join(parts)) # %%", "'dir' optional argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to directory where the module must", "ARGS.return_fmt in ('int', '8bit'): np.savetxt(sys.stdout, colors, '%i') else: np.savetxt(sys.stdout, colors, '%s') # This", "help if 'func' not in ARGS: parser.print_help() # Else, call the corresponding function", "parts = [name, desc_lines.pop(0), '\\n'] # Loop over all remaining desc_lines for line", "else: np.savetxt(sys.stdout, colors, '%s') # This 
function handles the 'mkcmod' subcommand def cli_mk_cmod():", "semicolons if sys.platform.startswith('win'): env_pkgs = env_pkgs.split(';') # Else, if UNIX, split variable at", "mplcm import numpy as np # CMasher imports from cmasher import __version__ import", "env_pkgs is not None: # If Windows, split variable at semicolons if sys.platform.startswith('win'):", "not argparse.SUPPRESS: # Check if this action is a subparser's action if isinstance(action,", "cmap packages import_cmap_pkgs() # Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles the", "parser for several commands cmap_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap' argument cmap_parent_parser.add_argument( 'cmap',", "Loop over all subcommands defined in the action for name in names: #", "import each package for cmap_pkg in cmap_pkgs: try: import_module(cmap_pkg) except ImportError: pass #", "action='version', version=\"CMasher v{}\".format(__version__)) # Create a cmap parser for several commands cmap_parent_parser =", "whenever `cmr` is invoked from the command-line. 
\"\"\" # Initialize argparser parser =", "type=str, dest='return_fmt') # Add cmap_colors subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors),", "# This function handles the 'cmap_type' subcommand def cli_cmap_type(): # Import cmap packages", "(no func was provided), show help if 'func' not in ARGS: parser.print_help() #", "subcommand description desc_lines = self._split_lines(description, help_width) # Create list of all parts of", "action is a subparser's action if isinstance(action, argparse._SubParsersAction): # If so, sort action.choices", "import_module(cmap_pkg) except ImportError: pass # %% MAIN FUNCTION def main(): \"\"\" This is", "as np # CMasher imports from cmasher import __version__ import cmasher as cmr", "with colormaps cmap_pkgs = {'cmocean', 'colorcet', 'palettable'} # Obtain packages from CMR_CMAP_PKGS environment", "colors in\", action='store', default=defaults['return_fmt'], choices=['float', 'norm', 'int', '8bit', 'str', 'hex'], type=str, dest='return_fmt') #", "variable else: env_pkgs = [] # Add pkgs cmap_pkgs.update(env_pkgs) # Attempt to import", "exit print(\"Requested 'CMAP' ({!r}) cannot be found!\".format(cmap)) sys.exit() # Return cmap return(cmap) #", "is the main function of the CLI and is called whenever `cmr` is", "GLOBALS # Define main description of this package main_desc = (\"CMasher: Scientific colormaps", "obj_name = cmap.split(':', 1) obj_path = obj_name.split('.') # Import the provided module as", "If arguments is empty (no func was provided), show help if 'func' not", "# Call super method in all other cases else: super().add_argument(action) # This function", "colormaps for making accessible, informative\" \" and 'cmashing' plots\") # %% CLASS DEFINITIONS", "Define set of packages with colormaps cmap_pkgs = {'cmocean', 'colorcet', 'palettable'} # Obtain", "metavar='N', action='store', type=int) # Set 
defaults for cmap_colors_parser cmap_colors_parser.set_defaults(func=cli_cmap_colors) # RGB_TABLE COMMAND #", "cmap in setuptools-style except ValueError: # Check if cmap contains a colon if", "= obj_name.split('.') # Import the provided module as cmap cmap = import_module(mod_name) #", "from MPL try: cmap = mplcm.get_cmap(cmap) # If this does not work, try", "print(cmr.get_cmap_type(get_cmap(ARGS.cmap))) # This function handles the 'take_cmap_colors' subcommand def cli_cmap_colors(): # Import cmap", "cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Add 'N' argument", "argument cmap_colors_parser.add_argument( 'ncolors', help=\"Number of colors to take\", metavar='N', action='store', type=int) # Set", "of the help texts help_position = min(self._action_max_length+2, self._max_help_position) help_width = max(self._width-help_position, 11) name_width", "saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse the", "a.b.c.d)\"), metavar='CMAP', action='store', type=str) # BIBTEX COMMAND # Add bibtex subparser bibtex_parser =", "= subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) #", "print(\"Requested 'CMAP' ({!r}) cannot be found!\".format(cmap)) sys.exit() # Return cmap return(cmap) # This", "%% MAIN FUNCTION def main(): \"\"\" This is the main function of the", "subparser cmap_type_parser = subparsers.add_parser( 'cmtype', parents=[cmap_parent_parser], description=e13.get_main_desc(cmr.get_cmap_type), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set 
defaults for", "cmap_pkgs if it is not empty if env_pkgs is not None: # If", "colormap (e.g., 'a.b:c.d' -> import a.b; \" \"cmap = a.b.c.d)\"), metavar='CMAP', action='store', type=str)", "# Obtain the optional default arguments of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ # Create", "# Package imports import e13tools as e13 from matplotlib import cm as mplcm", "try to expand given cmap in setuptools-style except ValueError: # Check if cmap", "it is not empty if env_pkgs is not None: # If Windows, split", "add_help=True) # Add 'cmap' argument mk_cmod_parser.add_argument( 'cmap', help=\"Name of *CMasher* colormap to create", "is invoked from the command-line. \"\"\" # Initialize argparser parser = argparse.ArgumentParser( 'cmr',", "DEFINITIONS # Define formatter that automatically extracts help strings of subcommands class HelpFormatterWithSubCommands(argparse.ArgumentDefaultsHelpFormatter):", "from importlib import import_module import os import sys # Package imports import e13tools", "proper formatting name = \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name, name_width, ' ' if(len(name)", "the optional default arguments of take_cmap_colors defaults = cmr.take_cmap_colors.__kwdefaults__ # Create a take_colors", "be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults for mk_cmod_parser mk_cmod_parser.set_defaults(func=cli_mk_cmod) # Parse", "to create standalone module for\", metavar='CMAP', action='store', type=str) # Add 'dir' optional argument", "Obtain packages from CMR_CMAP_PKGS environment variable env_pkgs = os.environ.get('CMR_CMAP_PKGS', None) # Add env_pkgs", "subcommand def cli_cmap_type(): # Import cmap packages import_cmap_pkgs() # Print cmap type print(cmr.get_cmap_type(get_cmap(ARGS.cmap)))", "take_colors_parent_parser = argparse.ArgumentParser(add_help=False) # Add 'cmap_range' optional argument take_colors_parent_parser.add_argument( '--range', help=(\"Normalized 
value range", "RGB values of the provided `cmap`.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for rgb_table_parser", "as e13 from matplotlib import cm as mplcm import numpy as np #", "that module has been created print(\"Created standalone colormap module of %r in %r.\"", "function handles the 'take_cmap_colors' subcommand def cli_cmap_colors(): # Import cmap packages import_cmap_pkgs() #", "mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to directory where the module must be saved\", action='store',", "This function handles the 'bibtex' subcommand def cli_bibtex(): cmr.get_bibtex() # This function handles", "the 'mkcmod' subcommand def cli_mk_cmod(): # Create cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir)", "formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults for bibtex_parser bibtex_parser.set_defaults(func=cli_bibtex) # CMAP_TYPE COMMAND # Add", "import e13tools as e13 from matplotlib import cm as mplcm import numpy as", "add to parts parts.append(\"%s%s\\n\" % (' '*help_position, line)) # Convert to a single", "# Print the colors line-by-line if ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout, colors, '%.8f')", "OPTIONAL ARGUMENTS # Add 'version' argument parser.add_argument( '-v', '--version', action='version', version=\"CMasher v{}\".format(__version__)) #", "Add bibtex subparser bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) # Set defaults", "to the proper formatting name = \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name, name_width, '", "the module must be saved\", action='store', default=cmr.create_cmap_mod.__kwdefaults__['save_dir'], type=str) # Set defaults for mk_cmod_parser", "get_cmap(cmap): # Try to obtain the colormap from MPL try: cmap = mplcm.get_cmap(cmap)", "e13tools as e13 from matplotlib 
import cm as mplcm import numpy as np", "Create cmap module cmap_path = cmr.create_cmap_mod(ARGS.cmap, save_dir=ARGS.dir) # Print on commandline that module", "all other cases else: super().add_argument(action) # This function formats the description of a", "main function of the CLI and is called whenever `cmr` is invoked from", "BIBTEX COMMAND # Add bibtex subparser bibtex_parser = subparsers.add_parser( 'bibtex', description=e13.get_main_desc(cmr.get_bibtex), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True)", "# Import the provided module as cmap cmap = import_module(mod_name) # Import the", "dest='cmap_range') # Add 'fmt' optional argument take_colors_parent_parser.add_argument( '--fmt', help=\"Format to return colors in\",", "= [name, desc_lines.pop(0), '\\n'] # Loop over all remaining desc_lines for line in", "optional argument mk_cmod_parser.add_argument( '-d', '--dir', help=\"Path to directory where the module must be", "return_fmt=ARGS.return_fmt) # Print the colors line-by-line if ARGS.return_fmt in ('float', 'norm'): np.savetxt(sys.stdout, colors,", "obj_path = obj_name.split('.') # Import the provided module as cmap cmap = import_module(mod_name)", "'--version', action='version', version=\"CMasher v{}\".format(__version__)) # Create a cmap parser for several commands cmap_parent_parser", "# Import cmap packages import_cmap_pkgs() # Obtain the colors colors = cmr.take_cmap_colors(get_cmap(ARGS.cmap), ARGS.ncolors,", "# %% MAIN FUNCTION def main(): \"\"\" This is the main function of", "= self._split_lines(description, help_width) # Create list of all parts of the description of", "in all other cases else: super().add_argument(action) # This function formats the description of", "%% IMPORTS # Built-in imports import argparse from importlib import import_module import os", "%r.\" % (ARGS.cmap, cmap_path)) # %% FUNCTION DEFINITIONS # This function obtains the", "# This function formats the description of a subcommand with given name 
def", "Determine the positions and widths of the help texts help_position = min(self._action_max_length+2, self._max_help_position)", "This function formats the description of a subcommand with given name def format_subcommands(self,", "object from this module for obj in obj_path: cmap = getattr(cmap, obj) #", "subcommand and add it self._add_item(self.format_subcommands, [name, subparser.description]) # Call super method in all", "action is required if action.help is not argparse.SUPPRESS: # Check if this action", "format_subcommands(self, name, description): # Determine the positions and widths of the help texts", "Add cmap_colors subparser cmap_colors_parser = subparsers.add_parser( 'cmcolors', parents=[cmap_parent_parser, take_colors_parent_parser], description=e13.get_main_desc(cmr.take_cmap_colors), formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=True) #", "MPL try: cmap = mplcm.get_cmap(cmap) # If this does not work, try to", "obj in obj_path: cmap = getattr(cmap, obj) # If cmap is still a", "name = \"{0}{1: <{2}}{3}\".format( ' '*self._current_indent, name, name_width, ' ' if(len(name) <= name_width)" ]
[ "import argparse from pathlib import Path from contextlib import contextmanager import pandas as", "progress_msg(\"Reading item properties... (this takes a bit)\") item_props_df1 = read_item_props(path / PROPS_FILE_1) item_props_df2", "column\"): pd.read_parquet(output_file, columns=['cryptic_attrs_cat']) with timed(\"load file - all columns *except* these two\"): cols", "in path.iterdir()} if not files_in_path >= INPUT_FILENAMES: sys.exit(f'Missing one or more input files:", "how='inner', on='itemid') progress_msg(\"Making columns more queryable...\") events['price'] = events['790'].str[1:].astype(float) / 1000 events.drop(columns=['790'], inplace=True)", "(events.memory_usage(deep=True) / 1024 ** 2).round(decimals=2) progress_msg(f'Size of DataFrame columns in memory (in MB):')", "= Path(args.path) if not path.exists() or not path.is_dir(): sys.exit(f'No such directory: {path}') files_in_path", "EXPECTED_EVENT_COUNT = 2_500_516 def progress_msg(msg: str): print(f\"\\033[33m{msg}\\033[0m\") # Yellow, just yellow @contextmanager def", "def progress_msg(msg: str): print(f\"\\033[33m{msg}\\033[0m\") # Yellow, just yellow @contextmanager def timed(caption: str): start", "with timed(\"load file - all columns\"): pd.read_parquet(output_file) with timed(\"load file - just the", "type=str, help='Directory where downloaded dataset files are found and output file will be", "as pd from pandas import DataFrame EVENTS_FILE = 'events.csv' PROPS_FILE_1 = 'item_properties_part1.csv' PROPS_FILE_2", "/ 1024 ** 2:.1f}MB\") with timed(\"load file - all columns\"): pd.read_parquet(output_file) with timed(\"load", "if not files_in_path >= INPUT_FILENAMES: sys.exit(f'Missing one or more input files: {INPUT_FILENAMES}') ingest(path)", "item properties from cached file {item_props_tempfile}\") item_props_df = pd.read_parquet(item_props_tempfile) else: progress_msg(\"Reading item properties...", "ITEM_PROPERTY_COLUMNS = {'categoryid', 'available', '790', '888'} EXPECTED_EVENT_COUNT = 
2_500_516 def progress_msg(msg: str): print(f\"\\033[33m{msg}\\033[0m\")", "cached file {item_props_tempfile}\") item_props_df = pd.read_parquet(item_props_tempfile) else: progress_msg(\"Reading item properties... (this takes a", "item properties... (this takes a bit)\") item_props_df1 = read_item_props(path / PROPS_FILE_1) item_props_df2 =", "[col for col in events.dtypes.index if col not in ['cryptic_attrs', 'cryptic_attrs_cat']] pd.read_parquet(output_file, columns=cols)", "events.drop(columns=['790'], inplace=True) events['available'] = events['available'].astype(int).astype(bool) events['categoryid'] = events['categoryid'].astype('category') events['event'] = events['event'].astype('category') events.rename(columns={'888': 'cryptic_attrs'},", "{len(events)}\") output_file = path / 'retailrocket.parquet' events.to_parquet(output_file) col_memory_sizes = (events.memory_usage(deep=True) / 1024 **", "= pd.merge(events, item_props_df, how='inner', on='itemid') progress_msg(\"Making columns more queryable...\") events['price'] = events['790'].str[1:].astype(float) /", "for col in events.dtypes.index if col not in ['cryptic_attrs', 'cryptic_attrs_cat']] pd.read_parquet(output_file, columns=cols) if", "total = time.time() - start print(f\"Time to {caption}: {total:.3f} seconds\") # Read item", "found and output file will be written') args = parser.parse_args() path = Path(args.path)", "pandas import DataFrame EVENTS_FILE = 'events.csv' PROPS_FILE_1 = 'item_properties_part1.csv' PROPS_FILE_2 = 'item_properties_part2.csv' INPUT_FILENAMES", "events['categoryid'].astype('category') events['event'] = events['event'].astype('category') events.rename(columns={'888': 'cryptic_attrs'}, inplace=True) progress_msg(\"Storing 'cryptic_attrs' also as categorical column", "item properties files, filter for relevant columns and 'pivot' its structure from rows", "of event types:\") print(events['event'].value_counts()) if len(events) != EXPECTED_EVENT_COUNT: progress_msg(f\"WARNING: 
Expected {EXPECTED_EVENT_COUNT} events, but", "= 'item_properties_part1.csv' PROPS_FILE_2 = 'item_properties_part2.csv' INPUT_FILENAMES = {EVENTS_FILE, PROPS_FILE_1, PROPS_FILE_2} ITEM_PROPERTY_COLUMNS = {'categoryid',", "time import argparse from pathlib import Path from contextlib import contextmanager import pandas", "https://www.kaggle.com/retailrocket/ecommerce-dataset/)') parser.add_argument( 'path', type=str, help='Directory where downloaded dataset files are found and output", "/ 1000 events.drop(columns=['790'], inplace=True) events['available'] = events['available'].astype(int).astype(bool) events['categoryid'] = events['categoryid'].astype('category') events['event'] = events['event'].astype('category')", "progress_msg(f\"Reading item properties from cached file {item_props_tempfile}\") item_props_df = pd.read_parquet(item_props_tempfile) else: progress_msg(\"Reading item", "time.time() - start print(f\"Time to {caption}: {total:.3f} seconds\") # Read item properties files,", "queryable...\") events['price'] = events['790'].str[1:].astype(float) / 1000 events.drop(columns=['790'], inplace=True) events['available'] = events['available'].astype(int).astype(bool) events['categoryid'] =", "pd.merge(events, item_props_df, how='inner', on='itemid') progress_msg(\"Making columns more queryable...\") events['price'] = events['790'].str[1:].astype(float) / 1000", "from cached file {item_props_tempfile}\") item_props_df = pd.read_parquet(item_props_tempfile) else: progress_msg(\"Reading item properties... 
# Read an item properties file, filter for relevant columns and 'pivot' its
# structure from rows to columns
def read_item_props(filepath: Path, properties=None) -> DataFrame:
    """
    Read one RetailRocket item-properties CSV and pivot it to one row per
    item.

    The CSV has one row per (item, property, timestamp) observation with
    columns including ``itemid``, ``property`` and ``value``.  Only rows
    whose property is in *properties* are kept, and for each
    (item, property) pair the first value (in file order) wins.  The result
    is a DataFrame indexed by ``itemid`` with one column per property.

    Parameters
    ----------
    filepath : Path
        CSV file to read.
    properties : collection of str, optional
        Property names to keep.  Defaults to ``ITEM_PROPERTY_COLUMNS``
        (generalization: callers may now select a custom subset; the
        default preserves the original behavior).
    """
    if properties is None:
        properties = ITEM_PROPERTY_COLUMNS

    df = pd.read_csv(filepath)
    df = df[df['property'].isin(properties)]

    # Keep the first observed value per (item, property) pair
    first_value_per_item = df.groupby(["itemid", "property"])["value"].first()

    # Pivot: (itemid, property) rows -> one column per property
    df = first_value_per_item.to_frame()
    df = df.unstack(level=-1)
    df.columns = df.columns.droplevel(0)
    return df
dtypes):\") print(events.dtypes) progress_msg(\"Breakdown of event", "dataset files are found and output file will be written') args = parser.parse_args()", "EVENTS_FILE) progress_msg(\"Joining events with item properties...\") events = pd.merge(events, item_props_df, how='inner', on='itemid') progress_msg(\"Making", "= df[df['property'].isin(ITEM_PROPERTY_COLUMNS)] first_value_per_item = df.groupby([\"itemid\", \"property\"])[\"value\"].first() df = first_value_per_item.to_frame() df = df.unstack(level=-1) df.columns", "events['price'] = events['790'].str[1:].astype(float) / 1000 events.drop(columns=['790'], inplace=True) events['available'] = events['available'].astype(int).astype(bool) events['categoryid'] = events['categoryid'].astype('category')", "progress_msg(f'Size of DataFrame columns in memory (in MB):') print(col_memory_sizes) progress_msg(f\"==> Saved output file", "= {EVENTS_FILE, PROPS_FILE_1, PROPS_FILE_2} ITEM_PROPERTY_COLUMNS = {'categoryid', 'available', '790', '888'} EXPECTED_EVENT_COUNT = 2_500_516", "with timed(\"load file - just the 'cryptic_attrs' column\"): pd.read_parquet(output_file, columns=['cryptic_attrs']) with timed(\"load file", "yield total = time.time() - start print(f\"Time to {caption}: {total:.3f} seconds\") # Read", "contextlib import contextmanager import pandas as pd from pandas import DataFrame EVENTS_FILE =", "file - all columns\"): pd.read_parquet(output_file) with timed(\"load file - just the 'cryptic_attrs' column\"):", "a bit)\") item_props_df1 = read_item_props(path / PROPS_FILE_1) item_props_df2 = read_item_props(path / PROPS_FILE_2) item_props_df", "time.time() yield total = time.time() - start print(f\"Time to {caption}: {total:.3f} seconds\") #", "= {f.name for f in path.iterdir()} if not files_in_path >= INPUT_FILENAMES: sys.exit(f'Missing one", "import DataFrame EVENTS_FILE = 'events.csv' PROPS_FILE_1 = 'item_properties_part1.csv' PROPS_FILE_2 = 'item_properties_part2.csv' INPUT_FILENAMES =", "= time.time() - 
start print(f\"Time to {caption}: {total:.3f} seconds\") # Read item properties", "file - just the 'cryptic_attrs_cat' column\"): pd.read_parquet(output_file, columns=['cryptic_attrs_cat']) with timed(\"load file - all", "2:.1f}MB\") with timed(\"load file - all columns\"): pd.read_parquet(output_file) with timed(\"load file - just", "pd.read_parquet(output_file, columns=['cryptic_attrs_cat']) with timed(\"load file - all columns *except* these two\"): cols =", "import time import argparse from pathlib import Path from contextlib import contextmanager import", "print(f\"Time to {caption}: {total:.3f} seconds\") # Read item properties files, filter for relevant", "parser = argparse.ArgumentParser( description='Ingest RetailRocket dataset (to download: https://www.kaggle.com/retailrocket/ecommerce-dataset/)') parser.add_argument( 'path', type=str, help='Directory", "files, filter for relevant columns and 'pivot' its structure from rows to columns", "str): print(f\"\\033[33m{msg}\\033[0m\") # Yellow, just yellow @contextmanager def timed(caption: str): start = time.time()", "description='Ingest RetailRocket dataset (to download: https://www.kaggle.com/retailrocket/ecommerce-dataset/)') parser.add_argument( 'path', type=str, help='Directory where downloaded dataset", "= read_item_props(path / PROPS_FILE_2) item_props_df = item_props_df1.combine_first(item_props_df2) progress_msg(f\"Storing item properties to {item_props_tempfile} for", "pd.read_parquet(output_file, columns=cols) if __name__ == '__main__': parser = argparse.ArgumentParser( description='Ingest RetailRocket dataset (to", "= parser.parse_args() path = Path(args.path) if not path.exists() or not path.is_dir(): sys.exit(f'No such", "= pd.read_csv(path / EVENTS_FILE) progress_msg(\"Joining events with item properties...\") events = pd.merge(events, item_props_df,", "item_props_df = pd.read_parquet(item_props_tempfile) else: progress_msg(\"Reading item properties... 
(this takes a bit)\") item_props_df1 =", "'item_properties_part1.csv' PROPS_FILE_2 = 'item_properties_part2.csv' INPUT_FILENAMES = {EVENTS_FILE, PROPS_FILE_1, PROPS_FILE_2} ITEM_PROPERTY_COLUMNS = {'categoryid', 'available',", "(a.k.a. dtypes):\") print(events.dtypes) progress_msg(\"Breakdown of event types:\") print(events['event'].value_counts()) if len(events) != EXPECTED_EVENT_COUNT: progress_msg(f\"WARNING:", "{item_props_tempfile}\") item_props_df = pd.read_parquet(item_props_tempfile) else: progress_msg(\"Reading item properties... (this takes a bit)\") item_props_df1", "written') args = parser.parse_args() path = Path(args.path) if not path.exists() or not path.is_dir():", "df.unstack(level=-1) df.columns = df.columns.droplevel(0) return df def ingest(path: Path): with timed(\"read & transform", "= df.columns.droplevel(0) return df def ingest(path: Path): with timed(\"read & transform item properties", "timed(\"load file - all columns\"): pd.read_parquet(output_file) with timed(\"load file - just the 'cryptic_attrs'", "from contextlib import contextmanager import pandas as pd from pandas import DataFrame EVENTS_FILE", "PROPS_FILE_1) item_props_df2 = read_item_props(path / PROPS_FILE_2) item_props_df = item_props_df1.combine_first(item_props_df2) progress_msg(f\"Storing item properties to", "columns=['cryptic_attrs']) with timed(\"load file - just the 'cryptic_attrs_cat' column\"): pd.read_parquet(output_file, columns=['cryptic_attrs_cat']) with timed(\"load", "pd.read_csv(filepath) df = df[df['property'].isin(ITEM_PROPERTY_COLUMNS)] first_value_per_item = df.groupby([\"itemid\", \"property\"])[\"value\"].first() df = first_value_per_item.to_frame() df =", "properties to {item_props_tempfile} for faster re-runs...\") item_props_df.to_parquet(item_props_tempfile) with timed(\"read & transform user events\"):", "progress_msg(\"Reading user events...\") events = pd.read_csv(path / EVENTS_FILE) progress_msg(\"Joining events with item properties...\")", "pd from 
pandas import DataFrame EVENTS_FILE = 'events.csv' PROPS_FILE_1 = 'item_properties_part1.csv' PROPS_FILE_2 =", "= {'categoryid', 'available', '790', '888'} EXPECTED_EVENT_COUNT = 2_500_516 def progress_msg(msg: str): print(f\"\\033[33m{msg}\\033[0m\") #", "= 'events.csv' PROPS_FILE_1 = 'item_properties_part1.csv' PROPS_FILE_2 = 'item_properties_part2.csv' INPUT_FILENAMES = {EVENTS_FILE, PROPS_FILE_1, PROPS_FILE_2}", "1000 events.drop(columns=['790'], inplace=True) events['available'] = events['available'].astype(int).astype(bool) events['categoryid'] = events['categoryid'].astype('category') events['event'] = events['event'].astype('category') events.rename(columns={'888':", "INPUT_FILENAMES = {EVENTS_FILE, PROPS_FILE_1, PROPS_FILE_2} ITEM_PROPERTY_COLUMNS = {'categoryid', 'available', '790', '888'} EXPECTED_EVENT_COUNT =", "import pandas as pd from pandas import DataFrame EVENTS_FILE = 'events.csv' PROPS_FILE_1 =", "(to download: https://www.kaggle.com/retailrocket/ecommerce-dataset/)') parser.add_argument( 'path', type=str, help='Directory where downloaded dataset files are found", "output file will be written') args = parser.parse_args() path = Path(args.path) if not", "pd.read_csv(path / EVENTS_FILE) progress_msg(\"Joining events with item properties...\") events = pd.merge(events, item_props_df, how='inner',", "events.reset_index(drop=True) progress_msg(\"Excerpt from final DataFrame:\") print(events) progress_msg(\"Columns types (a.k.a. dtypes):\") print(events.dtypes) progress_msg(\"Breakdown of", "help='Directory where downloaded dataset files are found and output file will be written')", "types (a.k.a. 
dtypes):\") print(events.dtypes) progress_msg(\"Breakdown of event types:\") print(events['event'].value_counts()) if len(events) != EXPECTED_EVENT_COUNT:", "'cryptic_attrs_cat' column\"): pd.read_parquet(output_file, columns=['cryptic_attrs_cat']) with timed(\"load file - all columns *except* these two\"):", "columns=cols) if __name__ == '__main__': parser = argparse.ArgumentParser( description='Ingest RetailRocket dataset (to download:", "first_value_per_item = df.groupby([\"itemid\", \"property\"])[\"value\"].first() df = first_value_per_item.to_frame() df = df.unstack(level=-1) df.columns = df.columns.droplevel(0)", "'item_properties_part2.csv' INPUT_FILENAMES = {EVENTS_FILE, PROPS_FILE_1, PROPS_FILE_2} ITEM_PROPERTY_COLUMNS = {'categoryid', 'available', '790', '888'} EXPECTED_EVENT_COUNT", "= read_item_props(path / PROPS_FILE_1) item_props_df2 = read_item_props(path / PROPS_FILE_2) item_props_df = item_props_df1.combine_first(item_props_df2) progress_msg(f\"Storing", "if len(events) != EXPECTED_EVENT_COUNT: progress_msg(f\"WARNING: Expected {EXPECTED_EVENT_COUNT} events, but final DataFrame has {len(events)}\")", "are found and output file will be written') args = parser.parse_args() path =", "Read item properties files, filter for relevant columns and 'pivot' its structure from", "{f.name for f in path.iterdir()} if not files_in_path >= INPUT_FILENAMES: sys.exit(f'Missing one or", "*except* these two\"): cols = [col for col in events.dtypes.index if col not", "if __name__ == '__main__': parser = argparse.ArgumentParser( description='Ingest RetailRocket dataset (to download: https://www.kaggle.com/retailrocket/ecommerce-dataset/)')", "'path', type=str, help='Directory where downloaded dataset files are found and output file will", "but final DataFrame has {len(events)}\") output_file = path / 'retailrocket.parquet' events.to_parquet(output_file) col_memory_sizes =", "on='itemid') progress_msg(\"Making columns more queryable...\") events['price'] = 
events['790'].str[1:].astype(float) / 1000 events.drop(columns=['790'], inplace=True) events['available']", "'pivot' its structure from rows to columns def read_item_props(filepath: Path) -> DataFrame: df", "- just the 'cryptic_attrs' column\"): pd.read_parquet(output_file, columns=['cryptic_attrs']) with timed(\"load file - just the", "print(col_memory_sizes) progress_msg(f\"==> Saved output file to: {output_file}, size: {output_file.stat().st_size / 1024 ** 2:.1f}MB\")", "in events.dtypes.index if col not in ['cryptic_attrs', 'cryptic_attrs_cat']] pd.read_parquet(output_file, columns=cols) if __name__ ==", "EVENTS_FILE = 'events.csv' PROPS_FILE_1 = 'item_properties_part1.csv' PROPS_FILE_2 = 'item_properties_part2.csv' INPUT_FILENAMES = {EVENTS_FILE, PROPS_FILE_1,", "inplace=True) progress_msg(\"Storing 'cryptic_attrs' also as categorical column 'cryptic_attrs_cat'...\") events['cryptic_attrs_cat'] = events['cryptic_attrs'].astype('category') events.reset_index(drop=True) progress_msg(\"Excerpt", "/ 'retailrocket.parquet' events.to_parquet(output_file) col_memory_sizes = (events.memory_usage(deep=True) / 1024 ** 2).round(decimals=2) progress_msg(f'Size of DataFrame", "'available', '790', '888'} EXPECTED_EVENT_COUNT = 2_500_516 def progress_msg(msg: str): print(f\"\\033[33m{msg}\\033[0m\") # Yellow, just", "\"property\"])[\"value\"].first() df = first_value_per_item.to_frame() df = df.unstack(level=-1) df.columns = df.columns.droplevel(0) return df def", "path = Path(args.path) if not path.exists() or not path.is_dir(): sys.exit(f'No such directory: {path}')", "in ['cryptic_attrs', 'cryptic_attrs_cat']] pd.read_parquet(output_file, columns=cols) if __name__ == '__main__': parser = argparse.ArgumentParser( description='Ingest", "events['event'] = events['event'].astype('category') events.rename(columns={'888': 'cryptic_attrs'}, inplace=True) progress_msg(\"Storing 'cryptic_attrs' also as categorical column 'cryptic_attrs_cat'...\")", "with timed(\"read & transform 
# Read item properties files, filter for relevant columns and 'pivot' its
# structure from rows to columns
def read_item_props(filepath: Path) -> DataFrame:
    """Load one item-properties CSV and reshape it to one row per item.

    Only properties listed in ITEM_PROPERTY_COLUMNS are kept. When an item
    carries several rows for the same property, the first value wins.
    Returns a DataFrame indexed by itemid with one column per property.
    """
    raw = pd.read_csv(filepath)
    relevant = raw[raw['property'].isin(ITEM_PROPERTY_COLUMNS)]
    pivoted = (
        relevant
        .groupby(["itemid", "property"])["value"]
        .first()
        .to_frame()
        .unstack(level=-1)
    )
    # unstack() leaves a ('value', <property>) MultiIndex; keep only the
    # property names as column labels.
    pivoted.columns = pivoted.columns.droplevel(0)
    return pivoted
def ingest(path: Path):
    """Build '<path>/retailrocket.parquet' from the raw RetailRocket CSVs.

    Steps:
      1. Read item properties (cached to a parquet temp file for re-runs).
      2. Read user events and inner-join the item properties onto them.
      3. Make columns queryable (float price, bool availability, several
         categoricals), then write the joined result to parquet.
      4. Print memory/size stats and time a few representative re-loads.

    Args:
        path: Directory containing the input CSVs; output is written here.

    Bug fix vs. original: `events.reset_index(drop=True)` discarded its
    return value (reset_index is not in-place by default), making the call
    a no-op — the result is now re-assigned.
    """
    with timed("read & transform item properties of all products"):
        item_props_tempfile = path / "item_props.parquet"
        if item_props_tempfile.exists():
            progress_msg(f"Reading item properties from cached file {item_props_tempfile}")
            item_props_df = pd.read_parquet(item_props_tempfile)
        else:
            progress_msg("Reading item properties... (this takes a bit)")
            item_props_df1 = read_item_props(path / PROPS_FILE_1)
            item_props_df2 = read_item_props(path / PROPS_FILE_2)
            # Part 1 wins on conflicts; part 2 only fills in missing values.
            item_props_df = item_props_df1.combine_first(item_props_df2)
            progress_msg(f"Storing item properties to {item_props_tempfile} for faster re-runs...")
            item_props_df.to_parquet(item_props_tempfile)

    with timed("read & transform user events"):
        progress_msg("Reading user events...")
        events = pd.read_csv(path / EVENTS_FILE)
        progress_msg("Joining events with item properties...")
        # 'itemid' is a column in events and the index of item_props_df;
        # pandas matches `on` against index level names as well as columns.
        events = pd.merge(events, item_props_df, how='inner', on='itemid')
        progress_msg("Making columns more queryable...")
        # Property '790' holds the price: drop the leading (non-numeric)
        # marker char, then un-scale — the raw value appears to be the
        # price multiplied by 1000 (hence the division).
        events['price'] = events['790'].str[1:].astype(float) / 1000
        events.drop(columns=['790'], inplace=True)
        events['available'] = events['available'].astype(int).astype(bool)
        events['categoryid'] = events['categoryid'].astype('category')
        events['event'] = events['event'].astype('category')
        events.rename(columns={'888': 'cryptic_attrs'}, inplace=True)
        progress_msg("Storing 'cryptic_attrs' also as categorical column 'cryptic_attrs_cat'...")
        events['cryptic_attrs_cat'] = events['cryptic_attrs'].astype('category')
        # Fixed: re-assign, since reset_index() returns a new DataFrame.
        events = events.reset_index(drop=True)
        progress_msg("Excerpt from final DataFrame:")
        print(events)
        progress_msg("Columns types (a.k.a. dtypes):")
        print(events.dtypes)
        progress_msg("Breakdown of event types:")
        print(events['event'].value_counts())
        if len(events) != EXPECTED_EVENT_COUNT:
            progress_msg(f"WARNING: Expected {EXPECTED_EVENT_COUNT} events, but final DataFrame has {len(events)}")

    output_file = path / 'retailrocket.parquet'
    events.to_parquet(output_file)
    col_memory_sizes = (events.memory_usage(deep=True) / 1024 ** 2).round(decimals=2)
    progress_msg('Size of DataFrame columns in memory (in MB):')
    print(col_memory_sizes)
    progress_msg(f"==> Saved output file to: {output_file}, size: {output_file.stat().st_size / 1024 ** 2:.1f}MB")
    # Demonstrate parquet's columnar access: loading a subset of columns
    # should be markedly faster than loading everything.
    with timed("load file - all columns"):
        pd.read_parquet(output_file)
    with timed("load file - just the 'cryptic_attrs' column"):
        pd.read_parquet(output_file, columns=['cryptic_attrs'])
    with timed("load file - just the 'cryptic_attrs_cat' column"):
        pd.read_parquet(output_file, columns=['cryptic_attrs_cat'])
    with timed("load file - all columns *except* these two"):
        cols = [col for col in events.dtypes.index if col not in ['cryptic_attrs', 'cryptic_attrs_cat']]
        pd.read_parquet(output_file, columns=cols)
columns=['cryptic_attrs']) with timed(\"load file - just", "transform user events\"): progress_msg(\"Reading user events...\") events = pd.read_csv(path / EVENTS_FILE) progress_msg(\"Joining events", "pd.read_parquet(output_file, columns=['cryptic_attrs']) with timed(\"load file - just the 'cryptic_attrs_cat' column\"): pd.read_parquet(output_file, columns=['cryptic_attrs_cat']) with", "to {item_props_tempfile} for faster re-runs...\") item_props_df.to_parquet(item_props_tempfile) with timed(\"read & transform user events\"): progress_msg(\"Reading", "with item properties...\") events = pd.merge(events, item_props_df, how='inner', on='itemid') progress_msg(\"Making columns more queryable...\")", "Path(args.path) if not path.exists() or not path.is_dir(): sys.exit(f'No such directory: {path}') files_in_path =", "f in path.iterdir()} if not files_in_path >= INPUT_FILENAMES: sys.exit(f'Missing one or more input", "!= EXPECTED_EVENT_COUNT: progress_msg(f\"WARNING: Expected {EXPECTED_EVENT_COUNT} events, but final DataFrame has {len(events)}\") output_file =", "events['cryptic_attrs_cat'] = events['cryptic_attrs'].astype('category') events.reset_index(drop=True) progress_msg(\"Excerpt from final DataFrame:\") print(events) progress_msg(\"Columns types (a.k.a. dtypes):\")", "pathlib import Path from contextlib import contextmanager import pandas as pd from pandas", "properties from cached file {item_props_tempfile}\") item_props_df = pd.read_parquet(item_props_tempfile) else: progress_msg(\"Reading item properties... (this", "Expected {EXPECTED_EVENT_COUNT} events, but final DataFrame has {len(events)}\") output_file = path / 'retailrocket.parquet'", "final DataFrame:\") print(events) progress_msg(\"Columns types (a.k.a. 
dtypes):\") print(events.dtypes) progress_msg(\"Breakdown of event types:\") print(events['event'].value_counts())", "from pandas import DataFrame EVENTS_FILE = 'events.csv' PROPS_FILE_1 = 'item_properties_part1.csv' PROPS_FILE_2 = 'item_properties_part2.csv'", "columns *except* these two\"): cols = [col for col in events.dtypes.index if col", "item_props_df, how='inner', on='itemid') progress_msg(\"Making columns more queryable...\") events['price'] = events['790'].str[1:].astype(float) / 1000 events.drop(columns=['790'],", "<reponame>DynamicYieldProjects/funnel-rocket import sys import time import argparse from pathlib import Path from contextlib", "in memory (in MB):') print(col_memory_sizes) progress_msg(f\"==> Saved output file to: {output_file}, size: {output_file.stat().st_size", "to {caption}: {total:.3f} seconds\") # Read item properties files, filter for relevant columns", "takes a bit)\") item_props_df1 = read_item_props(path / PROPS_FILE_1) item_props_df2 = read_item_props(path / PROPS_FILE_2)", "item properties...\") events = pd.merge(events, item_props_df, how='inner', on='itemid') progress_msg(\"Making columns more queryable...\") events['price']", "item properties to {item_props_tempfile} for faster re-runs...\") item_props_df.to_parquet(item_props_tempfile) with timed(\"read & transform user", "parser.parse_args() path = Path(args.path) if not path.exists() or not path.is_dir(): sys.exit(f'No such directory:", "__name__ == '__main__': parser = argparse.ArgumentParser( description='Ingest RetailRocket dataset (to download: https://www.kaggle.com/retailrocket/ecommerce-dataset/)') parser.add_argument(", "dtypes):\") print(events.dtypes) progress_msg(\"Breakdown of event types:\") print(events['event'].value_counts()) if len(events) != EXPECTED_EVENT_COUNT: progress_msg(f\"WARNING: Expected", "1024 ** 2:.1f}MB\") with timed(\"load file - all columns\"): pd.read_parquet(output_file) with timed(\"load file", "final DataFrame has {len(events)}\") 
output_file = path / 'retailrocket.parquet' events.to_parquet(output_file) col_memory_sizes = (events.memory_usage(deep=True)", "args = parser.parse_args() path = Path(args.path) if not path.exists() or not path.is_dir(): sys.exit(f'No", "return df def ingest(path: Path): with timed(\"read & transform item properties of all" ]
[ "= sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12 =", "print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y): theta_d =", "def decode(self, x, sample_idx=None): x = (x * (self.std + self.eps)) + self.mean", "grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1] edge_attr =", "= f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s = self.s", "edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0)", "if not self.old_mat: x = x[()] x = np.transpose(x, axes=range(len(x.shape) - 1, -1,", "rel(self, x, y): num_examples = x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1)", "range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:, 0:2 * self.d]", "super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y", "in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] + pred_ij", "are NN but they are not NearestNeighbor edge_index_inter = [] for x_i in", "n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range,", "4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4 + self.edge_features *", "1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def 
grid_edge_aug_full(n_x, n_y, r,", "n_x - 1): d = 1 / n_x a1 = a[x, y] a2", "theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if", "X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif grid == 'grid_edge_aug': X, edge_index_inner,", "edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner", "torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate multi-level graph, with split and assemble class", "= edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid)", "in range(n_y): for x in range(n_x): i = y * n_x + x", "torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,)", "a1)) X = torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index,", "edge_attr_up[:, 2 * self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr,", "= [] self.edge_index_up = [] self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up =", "Lp loss class LpLoss(object): def __init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension", "self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2", "= 1 / n_y edge_index.append((i, i + n_x)) edge_index.append((i + n_x, i)) if", "for x in (-1,1): x_j = x_i + x if is_periodic: x_j =", "distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 
3+self.edge_features*2)) a = theta_split[:, :self.edge_features]", "shall be stored as tensor instead of list # we concatenate the edge", "y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1: self.s = int(((resolution - 1)/r) +", "torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2))", "= theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:,", "self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up,", "get_data(self, theta, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data =", "x # if (xj, yj) is a valid node if is_periodic: x_j =", "// self.m # number of sub-grid def get_data(self, theta, edge_features=1): data = []", "is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx] edge_attr = np.zeros((self.n_edges,", "self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape(", "i + 1)) edge_index.append((i + 1, i )) if a != None: a1", "torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x", "nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >=", "def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data = 
h5py.File(self.file_path)", "= y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1: self.s = int(((resolution - 1)/r)", "torch.mean(x, 0) self.std = torch.std(x, 0) self.eps = eps def encode(self, x): x", "cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, scaling by range class", "(self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 *", "def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x could be in shape of", "l(x) return x class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__()", "self.m = sample_sizes self.level = level assert len(sample_sizes) == level assert len(mesh_size) ==", "= torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split,", "a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr", "a = downsample(params, n_x, (2 ** l)) if grid == 'grid': X, edge_index_inner,", "n_x) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index = [] edge_attr", "+= n_l # #construct inter-graph edge if l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2,", "[] self.edge_index_up = [] self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = []", "pwd_index) distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges = edge_index.shape[1]", "return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x, y): return self.rel(x, y) # A", "x = x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x) if self.to_cuda: x = x.cuda()", "pred, split_idx, batch_size2, sigma=1): # pred is a list (batches) of list (time", "a2, a1)) if (y != n_y - 1): d = 1 / n_y", "index edge_index_inner = 
edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X # if", "class MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch", "** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + n_x, i)) edge_attr.append((d, a2,", "- 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) if self.reduction: if self.size_average:", "edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d", "print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges", "np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features)) edge_attr[:, 0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1))", "graphs with sampling class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__()", "<gh_stars>100-1000 import torch import numpy as np import scipy.io import h5py import sklearn.metrics", "+ 2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:,", "range(n_x): i = x i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i )) i2", "= Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r,", "[1, n]) X2 = np.tile(x2.reshape(1, n), [n, 1]) X_diff = X1 - X2", "n_y - 1): edge_index.append((i, i + n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i + n_x,", "edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x / n_x, 
a2, a1)) X = torch.tensor(xs,", "[] edge_index_list = [] edge_index_list_cuda = [] level = int(np.log2(s) - 1) print(level)", "n_x - 1): edge_index.append((i, i + 1)) edge_attr.append((1, 0, 0)) edge_index.append((i + 1,", "else: idx = torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end if", "r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return", "ny = self.s # pred_ij = pred_i[idx : idx + nx * ny]", "= data[:, ::l, ::l] data = data.reshape(-1, (grid_size // l) ** 2) return", "= 1) index2 = torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes index2 = torch.tensor(index2,", "= theta_l.reshape(N, n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the", "= theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d + 1] = theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22,", "edge_index, edge_attr def grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x, n_y) xs = np.linspace(0.0,", "range(s_l): for x in range(-3,4): x_j = x_i + x # if (xj,", "to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda = to_cuda self.to_float = to_float", "+ 1, y] edge_attr.append((x / n_x, y / n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x,", "__init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size", "of randomly sample sub-grids, here we downsample sub-grids self.grid = grid.reshape(resolution, resolution,2) #", "edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter)", "X_diff = X1 - X2 Y1 = 
np.tile(y1.reshape(n, 1), [1, n]) Y2 =", "[] if theta is None: for l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d))", "np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T def ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index =", "pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long))", "= radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2):", "l in enumerate(self.layers): x = l(x) if j != self.n_layers - 1: x", "self.m = m self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution,", "n_y), dtype=torch.long) # print('create multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X,", "= [] edge_index_down_out = [] edge_index_up_out = [] index = 0 for l", "get_boundary(self): s = self.s n = self.n boundary1 = np.array(range(0, s)) boundary2 =", "0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:,", "def __init__(self, grid, resolution, r, m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid", "= 0 for l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all", "edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y != n_y - 1): d = 1", "a list (batches) of list (time seq) assert len(pred) == len(split_idx) assert len(pred[0])", "sklearn.metrics.pairwise_distances(self.grid_sample) 
pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11))", "1 assert self.n_layers >= 1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j],", "sample_idx is None: std = self.std + self.eps # n mean = self.mean", "x.size() x = x.view(s[0], -1) x = (x - self.b)/self.a x = x.view(s)", "and Lp-norm type are postive assert d > 0 and p > 0", "= self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:,", "self.n_edges_inter = [] def sample(self, new_sample=True, index0=0): self.idx = [] self.grid_sample = []", "* self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21", "if (xj, yj) is a valid node if is_periodic: x_j = x_j %", "= np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index,", "(NN) if l==1: edge_index_nn = [] for x_i in range(s_l): for x in", "1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self,", "= np.tile(y1.reshape(n, 1), [1, n]) Y2 = np.tile(y2.reshape(1, n), [n, 1]) Y_diff =", "= data.reshape(-1, grid_size, grid_size) data = data[:, ::l, ::l] data = data.reshape(-1, (grid_size", "+1] = theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary =", "self.d + 2*self.attr_features)) edge_attr[:, 0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 *", "batch_size2, sigma=1): # pred is a list (batches) of list (time seq) assert", "# else: # X_l = torch.tensor(l, 
dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X, X_l],", "theta, edge_index): n_edges = edge_index.shape[1] edge_attr = np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges,", "encode(self, x): s = x.size() x = x.view(s[0], -1) x = self.a*x +", "return X, edge_index, edge_attr def grid_edge1d(n_x, a=None): if a != None: a =", "= torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l in range(self.level): edge_index_range[l, 0] =", "mesh, generate a list of data data = [] index = 0 for", "theta_d, s, N, is_periodic=False): grid_list = [] theta_list = [] edge_index_list = []", "data class MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch =", "m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead of randomly sample sub-grids, here we", "np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def", "edge_attr = f(xy[:, 0:self.d], xy[:, self.d:]) else: theta = theta[self.idx] edge_attr = f(xy[:,", "* self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2", "torch.randperm(self.n) index = 0 for l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index =", "edge_attr = np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1))", "x_i + x if is_periodic: x_j = x_j % s_l # if (xj,", "a valid node if is_periodic: x_j = x_j % s_l if (x_j in", "index = 0 for i in range(self.splits): if i==0: idx, idx_all = self.sample(new_sample=True,", "y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >= n_sub: m = 
self.m - n_sub", "= eps def encode(self, x): x = (x - self.mean) / (self.std +", "len(self.mean.shape) > len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps # T*batch*n mean = self.mean[:,sample_idx] #", "self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def get_grid(self):", "= np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22", "for x in range(n_x): i = y * n_x + x if (x", "0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l,", "** l) h_y_l = n_y // (2 ** l) n_l = h_x_l *", "r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index =", "self.l = l self.radius = radius assert self.n % self.m == 0 self.num", "out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2, sigma=1): #", "= self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r): grid = self.grid_sample pwd0 =", "x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std = torch.std(x) self.eps = eps", "self.old_mat: x = x[()] x = np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) if", "# x could be in shape of ntrain*n or ntrain*T*n or ntrain*n*T self.mean", "torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X,", "grid == 'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif grid == 'grid_edge':", "class 
RandomTwoMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space)", "PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2,", "= torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter =", "edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list) ==", "np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None): if f", "index < index_end: idx = self.perm[index: index_end] else: idx = torch.cat((self.perm[index: ],self.perm[: index_end]),", "** 2), np.exp(-(d / 0.01) ** 2))) if (y != n_y - 1):", "= x.view(s) return x #loss function with rel/abs Lp loss class LpLoss(object): def", "self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]]", "torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:, ::r_l,", "else 'cpu') # reading data class MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True):", "np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids = [] for j", "self.idx = [] self.grid_sample = [] if (new_sample) or (self.perm is None): self.perm", "a2 = a[x + 1, y] edge_attr.append((x / n_x, y / n_y, a1,", "mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d = 
len(real_space) self.ms = sample_sizes self.m =", "n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index", "gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate graph", "(self.std + self.eps) return x def decode(self, x, sample_idx=None): if sample_idx is None:", "self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r): grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid,", "index = index % self.n index_end = (index+self.ms[l]) % self.n if index <", "get_grid(self): grid_out = [] for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all,", "= torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug_full(n_x, n_y, r, a): n", "= np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.splits = self.n // self.m # number", "range(self.T): for i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for", "def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d = len(real_space) self.ms =", "self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None, theta=None): # if self.edge_index_boundary", "self).__init__() # x could be in shape of ntrain*n or ntrain*T*n or ntrain*n*T", "0) self.eps = eps def encode(self, x): x = (x - self.mean) /", "split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr,", "torch.nn as nn from 
scipy.ndimage import gaussian_filter ################################################# # # Utilities # #################################################", "sub-grid def get_data(self, theta, edge_features=1): data = [] for i in range(self.l): perm", "if (y != n_y - 1): d = 1 / n_y a1 =", "level, we construct the nearest neighbors (NN) if l==1: edge_index_nn = [] for", "= self.grid self.grid_sample_i = self.grid self.grid_sample_both = self.grid def sample(self): perm = torch.randperm(self.n)", "self.mean = torch.mean(x, 0) self.std = torch.std(x, 0) self.eps = eps def encode(self,", "= np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3,", "edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2", "j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x): for j, l in enumerate(self.layers):", "!= self.n_layers - 1: x = torch.sin(x) return x # generate graphs on", "dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x, n_y)", "edge_index, edge_attr def multi_grid(depth, n_x, n_y, grid, params): edge_index_global = [] edge_attr_global =", "r_l n_l = s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l) grid_l = xs", "Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD, axis=2) pwd_index", "* ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,)", "def set_torch(self, to_torch): self.to_torch = to_torch def set_float(self, to_float): self.to_float = to_float #", "sample_idx=index_split) 
print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def sampleT(self, theta, Y,", "dtype=torch.long).reshape(1, 2) if params==None: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else:", "1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def forward(self,", "in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1):", "torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None): if theta is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1))", "axes=range(len(x.shape) - 1, -1, -1)) if self.to_float: x = x.astype(np.float32) if self.to_torch: x", "edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1] edge_attr = np.zeros((n_edges, 4))", "+ self.edge_features * 2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, )", "(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d)", "np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2", "be in shape of ntrain*n or ntrain*T*n or ntrain*n*T self.mean = torch.mean(x, 0)", "of data data = [] index = 0 for i in range(self.splits): if", "self.d = len(real_space) self.m = sample_size self.attr_features = attr_features assert len(mesh_size) == self.d", "np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for y in range(n_y): for", "resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, 
self).__init__() # instead of randomly sample sub-grids,", "std = self.std[sample_idx] + self.eps # batch*n mean = self.mean[sample_idx] if len(self.mean.shape) >", "n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 + self.edge_features *", "2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) **", "-1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr,", "self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\", "sample(self): self.idx = [] self.grid_sample = [] perm = torch.randperm(self.n) index = 0", "0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 * self.d]", "torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list,", "self.m >= n_sub: m = self.m - n_sub perm = torch.randperm(self.n) idx =", "graph on Torus, with split and assemble class TorusGridSplitter(object): def __init__(self, grid, resolution,", "j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers - 1: if normalize:", "<= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd =", "self.d = d self.m = m self.l = l self.radius = radius assert", "= grid[:, 1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = 
self.pairwise_difference(grid,", "i )) if a != None: a1 = a[x] a2 = a[x +", "torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create multi_grid with size:',", "layers[j+1])) if j != self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity", "= self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None,", "r22)) + self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return", "i + n_x)) edge_index.append((i + n_x, i)) if a != None: a1 =", "in shape of ntrain*n or ntrain*T*n or ntrain*n*T self.mean = torch.mean(x, 0) self.std", "out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out =", "data = data.reshape(-1, (grid_size // l) ** 2) return data def simple_grid(n_x, n_y):", "a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) **", "theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,))", "index2 = index2.repeat(2, axis = 0).repeat(2, axis = 1) index2 = torch.tensor(index2).reshape(-1) index2", "X, edge_index, edge_attr def grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x, n_y) xs =", "self.n index_end = (index+self.ms[l]) % self.n if index < index_end: idx = self.perm[index:", "edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y):", "self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if", "/ n_y edge_index.append((i, i + 
n_x)) edge_index.append((i + n_x, i)) if a !=", "= None self._load_file() def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat = True except:", "or ntrain*n*T self.mean = torch.mean(x, 0) self.std = torch.std(x, 0) self.eps = eps", "grid3[:, :] = grid[:, :] + 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3", "self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range,", "% s_l # if (xj, yj) is a valid node if (x_j in", "else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,)", "self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m >", "Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1, ) X =", "self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] if theta is None:", "range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m) for j in range(batch_size2):", "__init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers) - 1 assert", "forward(self, x): for _, l in enumerate(self.layers): x = l(x) return x class", "t in range(self.T): for i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i =", "1) s_l = s // r_l n_l = s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0,", "= self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split =", "radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), 
dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd", "grid == 'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif grid ==", "NN but they are not NearestNeighbor edge_index_inter = [] for x_i in range(s_l):", "scipy.ndimage import gaussian_filter ################################################# # # Utilities # ################################################# device = torch.device('cuda' if", "# self.boundary_connectivity2d() if f is None: if theta is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1))", "dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None):", "self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split =", "a1 = a[x, y] a2 = a[x, y+1] edge_index.append((i, i + n_x)) edge_attr.append((d,", "# xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx", "edge_index_up_out def get_edge_index_range(self): # in order to use graph network's data structure, #", "grid_edge(n_x, n_y, a=None): if a != None: a = a.reshape(n_x, n_y) xs =", "get_edge_index_range(self): # in order to use graph network's data structure, # the edge", "dtype=torch.long) # print('create multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index,", "connected graph for l in range(depth): h_x_l = n_x // (2 ** l)", "dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out", "edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, 
edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down,", ":self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2]", "= index0 for l in range(self.level): index = index % self.n index_end =", "= Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] =", "edge_attr_down_out, edge_attr_up_out # generate graph, with split and assemble class RandomGridSplitter(object): def __init__(self,", "layers[j+1])) def forward(self, x): for j, l in enumerate(self.layers): x = l(x) if", "# ################################################# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reading data class", "x is in shape of batch*n or T*batch*n x = (x * std)", "y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m - grid_sub.shape[0]", "in range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down))", "out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def forward(self, x): for _, l in enumerate(self.layers):", "x2 = grid2[:,0] y2 = grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1, n]) X2", "self.std = torch.std(x) self.eps = eps def encode(self, x): x = (x -", "= self.std.cpu() # normalization, scaling by range class RangeNormalizer(object): def __init__(self, x, low=0.0,", "self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d]", "= len(real_space) 
self.m = sample_sizes self.level = level assert len(sample_sizes) == level assert", "self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 + self.edge_features * 2] =", "== self.T assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for", "[] theta_list = [] edge_index_list = [] edge_index_list_cuda = [] level = int(np.log2(s)", "def encode(self, x): s = x.size() x = x.view(s[0], -1) x = self.a*x", "sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.m_i = induced_point", "= x.size() x = x.view(s[0], -1) x = self.a*x + self.b x =", "!= None: a = a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x) # xs =", "[] num_nodes = 0 # build connected graph for l in range(depth): h_x_l", "= x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x) if self.to_cuda: x = x.cuda() return", "x = x.view(s) return x #loss function with rel/abs Lp loss class LpLoss(object):", "= 0 for l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] =", "assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid =", "sub-grids, here we downsample sub-grids self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution,", "torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges,", "return out.reshape(-1,) # generate multi-level graph, with split and assemble class RandomMultiMeshSplitter(object): def", "assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list) == self.splits if is_cuda: pred = torch.zeros(self.n,", "self.m = sample_size self.attr_features = attr_features assert len(mesh_size) == self.d if self.d ==", "index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, 
X_difference, Y_difference = self.torus_connectivity(grid_split)", "X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2)", "self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2", "# normalization, scaling by range class RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer,", "2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d + 1] = theta[self.edge_index_22[1]]", "edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s, N,", "= torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i]", "[] def sample(self): self.idx = [] self.grid_sample = [] perm = torch.randperm(self.n) index", "instead of list # we concatenate the edge index list and label the", "else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x, y): return self.rel(x, y) #", "grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = self.pairwise_difference(grid, grid) grid1 = grid", "mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False): grid_list = [] theta_list =", "= s // r_l n_l = s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l)", "1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2", "= torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 =", "x::self.r, y::self.r] = pred_ij[:nx * 
ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out =", "= len(real_space) self.m = sample_size self.m_i = induced_point assert len(mesh_size) == self.d if", "1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features]", "dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None): if f is", "= f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float)", "theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] =", "self.p = p self.reduction = reduction self.size_average = size_average def abs(self, x, y):", "- 1): edge_index.append((i, i + n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i + n_x, i))", "grid, resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead of randomly sample", "grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split,", "torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution,", "Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split", "def grid_edge(n_x, n_y, a=None): if a != None: a = a.reshape(n_x, n_y) xs", "y = split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s else: nx =", "a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None:", "-1)) else: theta = theta[self.idx] edge_attr = 
np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features))", "self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d + self.attr_features: 2", "self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22,", "= resolution**d self.d = d self.m = m self.l = l self.radius =", "= len(real_space) self.m = sample_size self.attr_features = attr_features assert len(mesh_size) == self.d if", "else: self.s = int(resolution/r) self.r = r self.n = resolution**2 self.m = m", "self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None, theta=None): # if self.edge_index_boundary == None:", "def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] if", "self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1,", "graph network's data structure, # the edge index shall be stored as tensor", "2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d + 1] = theta[self.edge_index_12[1]]", "X, edge_index, edge_attr def multi_grid(depth, n_x, n_y, grid, params): edge_index_global = [] edge_attr_global", "= torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then", "1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, 
edge_attr_inter2],", "dtype=torch.long) index1 = index1 + num_nodes num_nodes += n_l # #construct inter-graph edge", "= torch.randint(0, self.r, (1,)) y = torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r,", "\\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph", "normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1 self.layers", "= theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for", "len(real_space) self.m = sample_size self.attr_features = attr_features assert len(mesh_size) == self.d if self.d", "i1 in range(n): x1 = grid[i1] for i2 in range(n): x2 = grid[i2]", "len(split_idx) assert len(pred[0]) == self.T assert len(pred) == self.r**2 // batch_size2 out =", "self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d : 2 * self.d +", "edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a, theta_all): # give a test", "x # generate graphs on square domain class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size):", "edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long) if edge_features == 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1)", "self.sample(new_sample=False, index0=index) index = (index + self.m) % self.n grid, grid_all = self.get_grid()", "(2 ** l) h_y_l = n_y // (2 ** l) n_l = h_x_l", "= torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index =", "2 * self.d + 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float),", "n_x)) 
edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d", "2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d + 1] = theta[self.edge_index_21[1]]", "4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] =", "y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else: return", "/ (self.std + self.eps) return x def decode(self, x, sample_idx=None): x = (x", "list and label the range of each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range", "self.d if self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n,", "self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]]", "= torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else:", "= level assert len(sample_sizes) == level assert len(mesh_size) == self.d if self.d ==", "self.pairwise_difference(grid, grid1) grid2 = grid grid2[:, 1] = grid[:, 1] + 1 pwd2", "0 for i in range(self.splits): if i==0: idx, idx_all = self.sample(new_sample=True, index0=index) else:", "1)) edge_attr.append((1, 0, 0)) edge_index.append((i + 1, i)) edge_attr.append((-1, 0, 0)) if (y", "self.to_cuda = to_cuda self.to_float = to_float self.file_path = file_path self.data = None self.old_mat", "::l, ::l] data = data.reshape(-1, (grid_size // l) ** 2) return data def", "number of sub-grid def get_data(self, theta, edge_features=1): data = [] for i in", "grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2 = grid 
grid2[:, 1] = grid[:,", "by range class RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin =", "print('create multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr, mask_index,", "torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split", "xx in np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n))", "= np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index", "of list # we concatenate the edge index list and label the range", "the edge index list and label the range of each level edge_index_range =", "= torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge(n_x, n_y, a=None): if a", "xs = np.linspace(0.0, 1.0, n_x) # xs = np.array(range(n_x)) # ys = np.array(range(n_y))", "(x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda())", "reading data class MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch", "= [] edge_index_list_cuda = [] level = int(np.log2(s) - 1) print(level) for l", "self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d +", "sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_sizes self.level = level assert", "self.d = len(real_space) self.s = mesh_size[0] assert len(mesh_size) == self.d if self.d 
==", "pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred # generate graph, with split and assemble with", "x1 = grid1[:,0] y1 = grid1[:,1] x2 = grid2[:,0] y2 = grid2[:,1] X1", "self.std[sample_idx] + self.eps # batch*n mean = self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): std", "2) % n_x edge_index.append((i, i2)) edge_index.append((i2, i )) if a != None: a1", "if grid == 'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif grid ==", "distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 + self.edge_features * 2))", "torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge(n_x,", "Y_split = Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1)", "= len(layers) - 1 assert self.n_layers >= 1 self.layers = nn.ModuleList() for j", "+ self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out,", "x def set_cuda(self, to_cuda): self.to_cuda = to_cuda def set_torch(self, to_torch): self.to_torch = to_torch", "= 2 ** (l - 1) s_l = s // r_l n_l =", ")) i2 = (x + 2) % n_x edge_index.append((i, i2)) edge_index.append((i2, i ))", "+ 1] = theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2 *", "theta, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data = []", "= 0 for l in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index", "+ 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:, 0:2 *", "= split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] + pred_ij out = out / self.l #", "edge_attr.append((x / n_x, y / n_y, a1, 
a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if", "batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i", "is None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]])", "dtype=torch.float), \\ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph class RandomMultiMeshGenerator(object): def __init__(self, real_space,", "(1,)) y = torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub", "theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x", "X_global.append(X) # construct edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1 = index1 + num_nodes", "a1 = a[x, y] a2 = a[x + 1, y] edge_index.append((i, i +", "edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1),", "= reduction self.size_average = size_average def abs(self, x, y): num_examples = x.size()[0] #Assume", "idx = perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd", "// self.m # number of sub-grid if self.splits * self.m < self.n: self.splits", "y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm =", "edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d + 1] =", "** l)) if grid == 'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif", "self.reduction: if self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms def rel(self, x,", "+ pred_ij out = out / self.l # out = 
gaussian_filter(out, sigma=sigma, mode='constant',", "if their parents are NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter =", "split_idx[i].reshape(batch_size2, self.m) for j in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx]", "Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split)", "yj) is a valid node if is_periodic: x_j = x_j % s_l if", "data def simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0,", "n_x, a2, a1)) X = torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index", "xx in np.meshgrid(*grids)]).T self.idx = [] self.idx_all = None self.grid_sample = [] self.grid_sample_all", "torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x, y): return self.rel(x, y) # A simple", "resolution,1) self.resolution = resolution if resolution%2==1: self.s = int(((resolution - 1)/r) + 1)", "edge_index.append((i, i + 1)) edge_attr.append((1, 0, 0)) edge_index.append((i + 1, i)) edge_attr.append((-1, 0,", "!= n_y - 1): d = 1 / n_y edge_index.append((i, i + n_x))", "= np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22", "= a[i1] a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1", "self.n_layers >= 1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if", "# X = torch.cat([X, X_l], dim=1) X_global.append(X) # construct edges index1 = torch.tensor(range(n_l),", "== None: # self.boundary_connectivity2d() if f is None: if theta is None: edge_attr_boundary", "= [] level = int(np.log2(s) - 1) print(level) for l in range(1, level+1):", "attributes(self, theta=None): self.edge_attr = [] 
self.edge_attr_down = [] self.edge_attr_up = [] if theta", "pred_ij = pred_i[idx : idx + nx * ny] out[t, x::self.r, y::self.r] =", "theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx", "np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid =", "self.n self.idx = np.array(range(self.n)) self.idx_i = self.idx self.idx_both = self.idx self.grid_sample = self.grid", "eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std = torch.std(x) self.eps = eps def", "self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m > self.n: self.m =", "sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = self.pairwise_difference(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1", "edge_attr def grid_edge1d(n_x, a=None): if a != None: a = a.reshape(n_x) xs =", "= torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1)", "= self.mean.cpu() self.std = self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object): def __init__(self, x,", "n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4]", "grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1", "= 0 for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <=", "boundary4 = np.array(range(2 * s - 1, n, s)) self.boundary = np.concatenate([boundary1, boundary2,", "= torch.sin(x) return x # generate graphs on square domain class SquareMeshGenerator(object): def", "self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all] 
return self.idx, self.idx_all", "np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2)))", "list # we concatenate the edge index list and label the range of", "self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2)", "l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level -", "= file_path self.data = None self.old_mat = None self._load_file() def _load_file(self): try: self.data", "SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s = mesh_size[0]", "1: x = torch.sin(x) return x # generate graphs on square domain class", "theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split =", "self.edge_features * 2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:,", "in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge',", "in range(self.num): idx = perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] X =", "2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) if (y", "index_end if index0 < index_end: idx_all = self.perm[index0: index_end] else: idx_all = torch.cat((self.perm[index0:],", "in shape of batch*n or T*batch*n x = (x * std) + mean", "theta.reshape(self.resolution, self.resolution, theta_d) data = [] for x in range(self.r): for y in", "LpLoss(object): def 
__init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm type", "edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d /", "torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None): if f", "edge_index_inner = edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X # if (is_high):", "self.idx_both = perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both]", "in range(self.splits): if i==0: idx, idx_all = self.sample(new_sample=True, index0=index) else: idx, idx_all =", "= [] if (new_sample) or (self.perm is None): self.perm = torch.randperm(self.n) index =", "if params==None: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data =", "X = torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l, (depth - 1 - l)", "+ 1, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) **", "self.old_mat = None self._load_file() def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat = True", "= self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf =", "Y = Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r,", "a) elif grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) #", "= torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, 
split_idx=split_idx)) print('test', len(data), X.shape,", "params): edge_index_global = [] edge_attr_global = [] X_global = [] num_nodes = 0", "split and assemble class TorusGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, T=None,", "n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index = 0 for l in", "scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data = h5py.File(self.file_path) self.old_mat = False def load_file(self,", "= edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then compute the interactive neighbors", "<= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m self.edge_index_21", "self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d))", "< index_end: idx_all = self.perm[index0: index_end] else: idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0)", "self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self,", "edge_attr = np.zeros((n_edges, 3 + self.edge_features * 2)) a = theta_split[:, :self.edge_features] edge_attr[:,", "self.std = torch.std(x, 0) self.eps = eps def encode(self, x): x = (x", "[] self.grid_sample = [] if (new_sample) or (self.perm is None): self.perm = torch.randperm(self.n)", "self.idx = np.array(range(self.n)) self.idx_i = self.idx self.idx_both = self.idx self.grid_sample = self.grid self.grid_sample_i", "edge_attr_boundary[:,0:2*self.d] = 
self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d", "return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r): grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid)", "for l in range(self.level): index = index % self.n index_end = (index+self.ms[l]) %", "r, m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution, resolution,2) #", "2 * self.d)) edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d", "edge_attr = [] for i1 in range(n): x1 = grid[i1] for i2 in", "X = torch.cat([X, X_l], dim=1) X_global.append(X) # construct edges index1 = torch.tensor(range(n_l), dtype=torch.long)", "0).repeat(2, axis = 1) index2 = torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes index2", "= n_edge_index n_edge_index = 0 for l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index", "(index + self.m) % self.n grid, grid_all = self.get_grid() edge_index, edge_index_down, edge_index_up =", "nearest neighbors (NN) if l==1: edge_index_nn = [] for x_i in range(s_l): for", "1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr = []", "with downsample class DownsampleGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter,", "is_periodic=False): grid_list = [] theta_list = [] edge_index_list = [] edge_index_list_cuda = []", "edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up =", "(y != n_y - 1): edge_index.append((i, i + n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i", "X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, 
index_split.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1):", "* n_x + x if (x != n_x - 1): d = 1", "return self.rel(x, y) # A simple feedforward neural network class DenseNet(torch.nn.Module): def __init__(self,", "if f is None: if theta is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary", "# # Utilities # ################################################# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') #", "grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index", "= self.mean.cpu() self.std = self.std.cpu() # normalization, scaling by range class RangeNormalizer(object): def", "theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12,", "= self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l in range(self.level):", "def get_data(self, theta): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data =", "not None: self.layers.append(out_nonlinearity()) def forward(self, x): for _, l in enumerate(self.layers): x =", "theta = theta[self.idx] edge_attr = np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features)) edge_attr[:, 0:2", "= Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0, self.r, (1,)) y = torch.randint(0, self.r,", "self.layers.append(nonlinearity()) if out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def forward(self, x): for _, l", "= index_sub.reshape(-1, ) X = torch.cat([grid_split, 
theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference", "m = self.m - grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample =", "[] self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out = [] edge_index_down_out = []", "Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution)", "if (y != n_y - 1): edge_index.append((i, i + n_x)) edge_attr.append((0, 1, 0))", "** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) X", "for l in range(depth): h_x_l = n_x // (2 ** l) h_y_l =", "xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for y in", "2 * self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d))", "-1)) edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d + 1]", "n_x) ys = np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) # ys =", "None: std = self.std + self.eps # n mean = self.mean else: if", "= split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j]", "2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i2, i1))", "torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then compute", "+ self.edge_features] = a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4 + self.edge_features * 2]", "and assemble class TorusGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, T=None, edge_features=1,", "int(resolution/r) self.r = r self.n = resolution**2 self.m = m self.radius = radius", "n_y edge_index.append((i, i + n_x)) edge_index.append((i + n_x, i)) if a != None:", "(2 ** l) n_l = h_x_l * h_y_l a = downsample(params, n_x, (2", "return torch.tensor(edge_attr, dtype=torch.float) 
# # generate two-level graph class RandomTwoMeshGenerator(object): def __init__(self, real_space,", "len(boundary) vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary", "= theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None: edge_attr", "for xx in np.meshgrid(*grids)]).T self.splits = self.n // self.m # number of sub-grid", "grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) ==", "grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level assert len(radius_inter)", "edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def", "idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0)", "2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr,", "self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1 =", "+ 1)) edge_attr.append((1, 0, 0)) edge_index.append((i + 1, i)) edge_attr.append((-1, 0, 0)) if", "f=None, theta=None): if f is None: if theta is None: edge_attr = 
self.grid[self.edge_index.T].reshape((self.n_edges,", "= f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # # generate", "for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner,", "edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def assemble(self, pred, split_idx,", "real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if", "m self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def", "\\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12, r22): pwd =", "dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split =", "edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a =", "= pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:]", "def forward(self, x): for j, l in enumerate(self.layers): x = l(x) if j", "self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index =", "self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter", "def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, scaling by range", "[] edge_index_down_out = [] edge_index_up_out = [] index = 0 for l in", "= a[x + 1, y] 
edge_attr.append((x / n_x, y / n_y, a1, a2))", "grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr =", "sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample))", "= torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list,", "UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x could be in shape", "edge_index = torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x * n_y),", "mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.m_i =", "edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return", "edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self, out_list, sample_idx_list, is_cuda=False):", "grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr,", "= self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12", "dtype=torch.long) def attributes_boundary(self, f=None, theta=None): # if self.edge_index_boundary == None: # self.boundary_connectivity2d() if", 
"Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0]", "edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 +", "multi-level graph class RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d", "range(self.num): idx = perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1)", "edge_index_inter = [] for x_i in range(s_l): for x in range(-3,4): x_j =", "in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m) for j in", "np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l],", "torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,", "= torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx]", "import gaussian_filter ################################################# # # Utilities # ################################################# device = torch.device('cuda' if torch.cuda.is_available()", "[] self.edge_attr_down = [] self.edge_attr_up = [] if theta is None: for l", "out = out.cuda() for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i =", "= self.pairwise_difference(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1)", "#Dimension and Lp-norm type are postive assert d > 0 and p >", "def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) 
def ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index", "= torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def multi_grid(depth, n_x, n_y, grid, params):", "= np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:,", "in enumerate(self.layers): x = l(x) return x class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity,", "Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges,", "return self.idx, self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\", "n_y) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for", "+ num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X # if (is_high): # X =", "= self.s-1 if y==0: ny = self.s else: ny = self.s-1 else: nx", "None: edge_attr = f(xy[:, 0:self.d], xy[:, self.d:]) else: theta = theta[self.idx] edge_attr =", "assembleT(self, pred, split_idx, batch_size2, sigma=1): # pred is a list (batches) of list", "self.perm = None self.idx = [] self.idx_all = None self.grid_sample = [] self.grid_sample_all", "= x i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i )) i2 = (x", "edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index = pwd_index[np.where(pwd <= self.radius)] PWD_index = (np.where(pwd", "boundary3 = np.array(range(s, n, s)) boundary4 = np.array(range(2 * s - 1, n,", "are not NearestNeighbor edge_index_inter = [] for x_i in range(s_l): for x in", "idx_all = self.perm[index0: index_end] else: idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all =", "torch.cuda.is_available() 
else 'cpu') # reading data class MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False,", "if a != None: a1 = a[x, y] a2 = a[x, y+1] edge_attr.append((x/n_x,", "their parents are NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter,", "concatenate the edge index list and label the range of each level edge_index_range", "3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] =", "if (x != n_x - 1): edge_index.append((i, i + 1)) edge_attr.append((1, 0, 0))", "- 1): edge_index.append((i, i + 1)) edge_attr.append((1, 0, 0)) edge_index.append((i + 1, i))", "1] = grid[:, 1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 =", "get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None): if f is None: if", "l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index", "dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape,", "= a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d)", "if j != self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is", "else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] =", "edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr = np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0] edge_attr[:,", "= grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X =", "d self.m = m 
self.l = l self.radius = radius assert self.n %", "grid1[:,1] x2 = grid2[:,0] y2 = grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1, n])", "torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long) #", "x.view(s) return x #loss function with rel/abs Lp loss class LpLoss(object): def __init__(self,", "\\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 =", "self.a*x + self.b x = x.view(s) return x def decode(self, x): s =", "edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return", "data structure, # the edge index shall be stored as tensor instead of", "sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12", "n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta): theta_d = theta.shape[1]", "// (2 ** l) n_l = h_x_l * h_y_l a = downsample(params, n_x,", "Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference def get_data(self,", "edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] =", "self.d + 1] = theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2", ": 2 * self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d", "2 ** (l - 1) s_l = s // r_l n_l = s_l", "= self.grid.reshape(self.n,-1)[idx] theta_sample = 
theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index =", "dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(-1,) index_split =", "(depth - 1 - l) * 2)], dim=1) # else: # X_l =", "[] self.grid_sample_all = None self.edge_index = [] self.edge_index_down = [] self.edge_index_up = []", "= resolution**2 self.m = m self.radius = radius self.edge_features = edge_features self.index =", "np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4", "np.array(range(self.n)) self.grid_sample = self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample", "h = 1.0 / (x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p,", "i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i )) i2 = (x + 2)", "mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.attr_features =", "resolution%2==1: self.s = int(((resolution - 1)/r) + 1) else: self.s = int(resolution/r) self.r", "1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta", "edge_index, edge_attr def grid_edge1d(n_x, a=None): if a != None: a = a.reshape(n_x) xs", "edge_attr def multi_grid(depth, n_x, n_y, grid, params): edge_index_global = [] edge_attr_global = []", "2 * self.d + 2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2", "self.mean.cuda() self.std = 
self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() #", "= torch.zeros(self.n, ) for i in range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred #", ") edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 +", "idx + nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out =", "or ntrain*T*n or ntrain*n*T self.mean = torch.mean(x, 0) self.std = torch.std(x, 0) self.eps", "y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data", "a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges,", "= [] self.grid_sample_all = None self.edge_index = [] self.edge_index_down = [] self.edge_index_up =", "** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1", "construct X # if (is_high): # X = torch.cat([torch.zeros(n_l, l * 2), X,", "n_y) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr", "generate a list of data data = [] index = 0 for i", "edge_index_global = [] edge_attr_global = [] X_global = [] num_nodes = 0 #", "is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1))", "radius=0.25): super(RandomGridSplitter, self).__init__() self.grid = grid self.resolution = resolution self.n = resolution**d self.d", "if (new_sample) or (self.perm is None): self.perm = torch.randperm(self.n) index = index0 for", "edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list)", 
"sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2,", "xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # # generate two-level graph class", "graph, with split and assemble class RandomGridSplitter(object): def __init__(self, grid, resolution, d=2, m=200,", "!= None: a1 = a[x, y] a2 = a[x + 1, y] edge_attr.append((x", "2))) edge_index.append((i + 1, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)),", "self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all] return", "mean = self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps # T*batch*n", "attr_features assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid", "grid[i1] for i2 in range(n): x2 = grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1", "= torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms)", "edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) # update index edge_index_inner = edge_index_inner +", "l) ** 2) return data def simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0, n_x)", "#Assume uniform mesh h = 1.0 / (x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1)", "boundary4]) def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size = len(boundary) vertice1 = np.array(range(self.n))", "= torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr,", "range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = 
theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m", "self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 *", "self.grid_sample = self.grid[self.idx] return self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r,", "out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2, sigma=1): # pred is a list (batches)", "= Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance,", "self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 *", "MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda", "p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm type are postive assert d", "0)) if (y != n_y - 1): edge_index.append((i, i + n_x)) edge_attr.append((0, 1,", "2 * self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1))", "batch_size2, sigma=1, cuda=False): assert len(pred) == len(split_idx) assert len(pred) == self.num * self.l", "pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m) for j in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,)", "idx = split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] + pred_ij out = out / self.l", "self.n: self.splits = self.splits + 1 print('n:',self.n,' m:',self.m, ' number of splits:', self.splits", "def get_data(self, theta, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data", "= resolution self.n = 
resolution**d self.d = d self.m = m self.l =", "return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in order to use graph network's", "range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred # generate graph, with split and assemble", "edge_attr_down[:, 2 * self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 *", "self.m < self.n: self.splits = self.splits + 1 print('n:',self.n,' m:',self.m, ' number of", "dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in", "= distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features:", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge(n_x, n_y, a=None): if", "# Utilities # ################################################# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reading", "x if (x != n_x - 1): d = 1 / n_x a1", "= np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index", "edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph, with split and assemble class RandomGridSplitter(object): def", "def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object):", "** 2))) edge_index.append((i + n_x, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 *", "dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph class RandomMultiMeshGenerator(object):", "+ self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return 
torch.tensor(self.edge_index,", "grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub,", "encode(self, x): x = (x - self.mean) / (self.std + self.eps) return x", "pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij =", "attributes(self, f=None, theta=None): if f is None: if theta is None: edge_attr =", "= np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] +", "edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1] = theta[self.edge_index[1]]", "in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index", "Y_diff2, Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2) edge_index", "** 2))) edge_index.append((i + 1, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 *", "1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug(n_x, n_y, a):", "1): d = 1 / n_y a1 = a[x, y] a2 = a[x,", "= f(xy[:, 0:self.d], xy[:, self.d:]) else: theta = theta[self.idx] edge_attr = f(xy[:, 0:self.d],", "def __call__(self, x, y): return self.rel(x, y) # A simple feedforward neural network", "self.idx = np.array(range(self.n)) self.grid_sample = self.grid def sample(self): perm = torch.randperm(self.n) self.idx =", "# ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index", "dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph, with split and assemble class", "for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m) for", "1 / n_y edge_index.append((i, i + n_x)) 
edge_index.append((i + n_x, i)) if a", "self.T assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for t", "#loss function with rel/abs Lp loss class LpLoss(object): def __init__(self, d=2, p=2, size_average=True,", "torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1)", "grid[:, :] + 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3)", "0] + 1 grid4[:, 1] = grid[:, 1] - 1 pwd4 = sklearn.metrics.pairwise_distances(grid,", "dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index,", "n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y != n_y - 1):", "a2, a1)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index =", "theta_sub Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index,", "= np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary,", "= grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr =", "= h_x_l * h_y_l a = downsample(params, n_x, (2 ** l)) if grid", "= [] index = 0 for i in range(self.splits): if i==0: idx, idx_all", "self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d + 1] = theta[self.edge_index_21[1]] edge_attr_22 =", "torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out,", "r, is_forward=False): pwd = 
sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1]", "len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps # T*batch*n mean = self.mean[:,sample_idx] # x is", "elif grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) # update", "def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges =", "self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d +", "= theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1:", "in range(n_x): i = x i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i ))", "in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index", "self.idx_all = None self.grid_sample = [] self.grid_sample_all = None self.edge_index = [] self.edge_index_down", "2 * self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level", "dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in order to use graph", "X, edge_index, edge_attr def grid_edge1d(n_x, a=None): if a != None: a = a.reshape(n_x)", "self.n = resolution**d self.d = d self.m = m self.l = l self.radius", "else: self.n = 1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1],", "idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample =", "torch.randint(0, self.r, (1,)) y = torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1,", "0)) X = torch.tensor(grid, 
dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index,", "= [] self.edge_index_down = [] self.edge_index_up = [] self.edge_attr = [] self.edge_attr_down =", "sample_idx_list, is_cuda=False): assert len(out_list) == self.splits if is_cuda: pred = torch.zeros(self.n, ).cuda() else:", "range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1,", "edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index", "def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers) - 1", "out_list[i].reshape(-1) return pred # generate graph, with split and assemble with downsample class", "1.0, s_l) grid_l = xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l =", "self.old_mat = True except: self.data = h5py.File(self.file_path) self.old_mat = False def load_file(self, file_path):", "self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size", "index shall be stored as tensor instead of list # we concatenate the", "dtype=torch.long) n_edge_index = 0 for l in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index", "multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False): grid_list = [] theta_list = [] edge_index_list =", "= torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction:", "theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\ 
torch.tensor(edge_attr_22, dtype=torch.float)", "i + n_x)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) **", "index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >= n_sub: m =", "edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr", "self.resolution = resolution if resolution%2==1: self.s = int(((resolution - 1)/r) + 1) else:", "torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out,", "* self.d + 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\", "# Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr,", "torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample,", "sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] =", "# n mean = self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape): std = self.std[sample_idx]", "2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 *", "self.s-1 else: nx = self.s ny = self.s # pred_ij = pred_i[idx :", "self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape): std = self.std[sample_idx] + self.eps # batch*n", "* self.d)) 
edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d]", "None: a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0,", "= size_average def abs(self, x, y): num_examples = x.size()[0] #Assume uniform mesh h", "self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x,", "self.d + 1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2", "= x_j % s_l if (x_j in range(s_l)): # if (xi, yi), (xj,", "a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr", "is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]])", "= theta.reshape(self.resolution, self.resolution, theta_d) data = [] for x in range(self.r): for y", "= np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD, axis=2) pwd_index =", "X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X, X_l], dim=1) X_global.append(X) #", "1, 0)) edge_index.append((i + n_x, i)) edge_attr.append((0, -1, 0)) X = torch.tensor(grid, dtype=torch.float)", "pred = torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n, ) for i in range(self.splits):", "edge_index.append((i + 1, i )) if a != None: a1 = a[x, y]", "dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index = 0 for l in", "/ n_x edge_index.append((i, i + 1)) edge_index.append((i + 1, i )) if a", "+ 1, i)) edge_attr.append((-1, 0, 0)) if (y != n_y - 1): edge_index.append((i,", "dtype=torch.long) def attributes(self, f=None, theta=None): if f is None: if theta is None:", "edge_attr def grid_edge_aug_full(n_x, n_y, 
r, a): n = n_x * n_y xs =", "self.to_torch = to_torch def set_float(self, to_float): self.to_float = to_float # normalization, pointwise gaussian", "= torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the finest level, we construct the", "= pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] + pred_ij out = out", "ntrain*T*n or ntrain*n*T self.mean = torch.mean(x, 0) self.std = torch.std(x, 0) self.eps =", "= np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:,", "= theta[self.edge_index[0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12,", "Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr,", "pred_ij = pred_i[idx : idx + nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx", "x.size() x = x.view(s[0], -1) x = self.a*x + self.b x = x.view(s)", "self.perm[index: index_end] else: idx = torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index =", "type are postive assert d > 0 and p > 0 self.d =", "else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs", "= [] self.n_edges_inter = [] edge_index_out = [] edge_index_down_out = [] edge_index_up_out =", "<= radius_inter[l])) + index edge_index[1, :] = edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index,", "4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, 
dtype=torch.float) split_idx", "None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return", "list (time seq) assert len(pred) == len(split_idx) assert len(pred[0]) == self.T assert len(pred)", "x_j % s_l # if (xj, yj) is a valid node if (x_j", "or T*batch*n x = (x * std) + mean return x def cuda(self):", "if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d]", "edge_attr[:, 2 * self.d + self.attr_features: 2 * self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1,", "sub-grids self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y =", "np.array(range(2 * s - 1, n, s)) self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4])", "torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r,", "pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float)", "the range of each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long)", "torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down,", "ny = self.s else: ny = self.s-1 else: nx = self.s ny =", "else: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, 
Y_split.shape,", "n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i + n_x, i)) edge_attr.append((0, -1, 0)) X =", "* self.d)) edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d]", "= a[x, y] a2 = a[x + 1, y] edge_attr.append((x / n_x, y", "Y1 - Y2 return X_diff, Y_diff def torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid)", "1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\", "y+1] edge_index.append((i, i + n_x)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)),", "Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if self.m >=", "self.eps # n mean = self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape): std =", "0 and p > 0 self.d = d self.p = p self.reduction =", "data def assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx) assert len(pred)", "[] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid =", "0 for l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index", "= [] edge_index_up_out = [] index = 0 for l in range(self.level): pwd", "file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda = to_cuda self.to_float", "1] edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x / n_x, a2, a1)) X =", "a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0,", "sample_idx=idx[0])) return data def assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list) == self.splits if", "theta_sub = theta[x::self.r, y::self.r, 
:].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r,", "= a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx =", "theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,))", "Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 + self.edge_features * 2)) a =", "= split_idx[i].reshape(batch_size2, self.m) for j in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,)", ">= 1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self,", "None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2))", "= torch.randperm(self.n) perm = perm.reshape(self.num, self.m) for j in range(self.num): idx = perm[j,:].reshape(-1,)", "1 / n_x edge_index.append((i, i + 1)) edge_index.append((i + 1, i )) if", "x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self): self.mean =", "edge_attr.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1, cuda=False): assert len(pred) ==", "class RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space)", "mymax = torch.max(x, 0)[0].view(-1) self.a = (high - low)/(mymax - mymin) self.b =", "if self.edge_index_boundary == None: # self.boundary_connectivity2d() if f is None: if theta is", "\\ torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph class RandomMultiMeshGenerator(object): def", "[] self.grid_sample = [] perm = 
torch.randperm(self.n) index = 0 for l in", "= theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out", "None): self.perm = torch.randperm(self.n) index = index0 for l in range(self.level): index =", "return self.idx, self.idx_all def get_grid(self): grid_out = [] for grid in self.grid_sample: grid_out.append(torch.tensor(grid,", "dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out =", "torus1d_connectivity(self, r): grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0]", "= self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] =", "p self.reduction = reduction self.size_average = size_average def abs(self, x, y): num_examples =", "range(self.r): for y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d)", "split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return", "= index % self.n index_end = (index+self.ms[l]) % self.n if index < index_end:", "a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]]", "+ n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i + n_x, i)) edge_attr.append((0, -1, 0)) X", "n_x edge_index.append((i, i2)) edge_index.append((i2, i )) if a != None: a1 = a[x]", "*= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T def ball_connectivity(self, r): pwd", "-1)) if self.to_float: x = 
x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x) if self.to_cuda:", "self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 * self.d)) edge_attr[:,", "RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m", "0 self.num = self.n // self.m # number of sub-grid def get_data(self, theta,", "# print('create multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr,", "torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <=", "theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y", "self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 + self.edge_features * 2)) a = theta_split[:, :self.edge_features]", "grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter):", "edge_attr.append((0, 1, 0)) edge_index.append((i + n_x, i)) edge_attr.append((0, -1, 0)) X = torch.tensor(grid,", "m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution, resolution,2) # self.theta", "self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if self.m >= n_sub: m = self.m", "= grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2 = a[i2] edge_index.append((i1,", "pred, split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx) assert len(pred) == self.r**2 //", "2 * self.d] = theta[self.edge_index[l][0]] 
edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[l][1]]", "perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample)", "Utilities # ################################################# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reading data", "theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub,", "graph, with split and assemble class RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size, level, sample_sizes):", "dim=1) # else: # X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X,", "if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self,", "edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we", "edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges =", "distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4", "= torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 =", "theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, ) X = torch.cat([grid_split, theta_split],", "* self.d)) edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d +", "class GaussianNormalizer(object): def __init__(self, 
x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std =", "edge_index.append((i2, i )) if a != None: a1 = a[x] a2 = a[x", "split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred,", "edge_attr[:, 4 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr =", "np.linspace(0.0, 1.0, s_l) grid_l = xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l", "print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred, split_idx, batch_size2,", "construct edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1 = index1 + num_nodes num_nodes +=", "torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx def get_grid(self): return torch.tensor(self.grid_sample,", "nx * ny] out[t, x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out,", "compute the interactive neighbors -- their parents are NN but they are not", "set_cuda(self, to_cuda): self.to_cuda = to_cuda def set_torch(self, to_torch): self.to_torch = to_torch def set_float(self,", "0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)),", "torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0,", "self.ms = sample_sizes self.m = sample_sizes[0] self.level = level assert len(sample_sizes) == level", "i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d", "is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges,", "self.grid self.grid_sample_both = self.grid def 
sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i", "[] index = 0 for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index =", "theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]]", "= torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub =", "# #construct inter-graph edge if l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) #", "np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2", "= [] X_global = [] num_nodes = 0 # build connected graph for", "torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l in range(self.level):", "-1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both]", "self.n_edges_inner = [] self.n_edges_inter = [] def sample(self, new_sample=True, index0=0): self.idx = []", "2 * self.d : 2 * self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:,", "== self.r**2 // batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T): for", "theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0, self.r, (1,)) y =", "self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1,", "theta): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data = [] for", "self.perm[index0: index_end] else: 
idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all = idx_all self.grid_sample_all", "edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d + 1] =", "self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d +", "None: for l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in", "return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta): theta_d = theta.shape[1] theta = theta.reshape(self.resolution,", "= theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y =", "grid[:, 1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2)", "edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1)", "def assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list) == self.splits if is_cuda: pred =", "if theta is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d]", "np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 =", "self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 *", "d = 1 / n_y edge_index.append((i, i + n_x)) edge_index.append((i + n_x, i))", "torch.std(x) self.eps = eps 
def encode(self, x): x = (x - self.mean) /", "f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with sampling class", "1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level - 1): edge_attr_down =", "# if (is_high): # X = torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l, (depth", "Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if", "self).__init__() # instead of randomly sample sub-grids, here we downsample sub-grids self.grid =", "index_end] else: idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all = idx_all self.grid_sample_all =", "None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:,", "1]) Y_diff = Y1 - Y2 return X_diff, Y_diff def torus_connectivity(self, grid): pwd0", "-1) else: edge_attr = np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges,", "level assert len(sample_sizes) == level assert len(mesh_size) == self.d if self.d == 1:", "= self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 * self.d))", "assert d > 0 and p > 0 self.d = d self.p =", "theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r,", "0)) edge_index.append((i + 1, i)) edge_attr.append((-1, 0, 0)) if (y != n_y -", "* self.d : 2 * self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2", "- 1, n, s)) 
self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1):", "= theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution) x", "class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s =", "j, l in enumerate(self.layers): x = l(x) if j != self.n_layers - 1:", "= n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down", "in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all =", "sample_sizes self.m = sample_sizes[0] self.level = level assert len(sample_sizes) == level assert len(mesh_size)", "l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different from numpy", "= self.idx self.idx_both = self.idx self.grid_sample = self.grid self.grid_sample_i = self.grid self.grid_sample_both =", "1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def multi_grid(depth, n_x, n_y,", "h_x_l) elif grid == 'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif", "split_idx=idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def assemble(self, pred, split_idx, batch_size2,", "self.grid_sample = [] perm = torch.randperm(self.n) index = 0 for l in range(self.level):", "= 0 for i in range(self.splits): if i==0: idx, idx_all = self.sample(new_sample=True, index0=index)", "= pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s", "return out.reshape(-1,) # generate graph on Torus, with split and assemble class TorusGridSplitter(object):", 
"self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[:,x::self.r,", "edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1))", "print('n:',self.n,' m:',self.m, ' number of splits:', self.splits ) self.perm = None self.idx =", "grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1],", "of batch*n or T*batch*n x = (x * std) + mean return x", "in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n)", "torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T): for i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2,", "2)) edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2", "* self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:,", "is_cuda=False): assert len(out_list) == self.splits if is_cuda: pred = torch.zeros(self.n, ).cuda() else: pred", "sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2 = grid grid2[:, 1] =", "n_edges = edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long) if edge_features == 0: edge_attr =", "+ self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 + self.edge_features * 2]", "= np.array(range(s, n, s)) boundary4 = np.array(range(2 * s - 1, n, s))", "torch.cat([index_sub, idx], dim=0).reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub", "X = torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index = 
np.vstack(np.where(pwd <= self.radius)) n_edges =", "# generate graph on Torus, with split and assemble class TorusGridSplitter(object): def __init__(self,", "j in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] +", "perm = torch.randperm(self.n) index = 0 for l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]])", "4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2)", "radius_inter[l])) + index edge_index[1, :] = edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long))", "self.r**2 // batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T): for i", "theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index,", "2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) X =", "ys = np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T", "2))) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index,", "in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1]))", "self.grid_sample = self.grid self.grid_sample_i = self.grid self.grid_sample_both = self.grid def sample(self): perm =", "self.p, 1) if self.reduction: if self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms", "theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = 
Y_difference.reshape(n_edges, ) edge_attr[:,", "edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def", "None: a1 = a[x, y] a2 = a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1, a2))", "theta = theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2 * self.d]", "of ntrain*n or ntrain*T*n or ntrain*n*T self.mean = torch.mean(x, 0) self.std = torch.std(x,", "dtype=torch.float) return out.reshape(-1,) # generate multi-level graph, with split and assemble class RandomMultiMeshSplitter(object):", "(self.perm is None): self.perm = torch.randperm(self.n) index = index0 for l in range(self.level):", "self.m) for j in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx] =", "self.s = int(resolution/r) self.r = r self.n = resolution**2 self.m = m self.T", "np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr", "data data = [] index = 0 for i in range(self.splits): if i==0:", "= y * n_x + x if (x != n_x - 1): d", "pred # generate graph, with split and assemble with downsample class DownsampleGridSplitter(object): def", "= grid1[:,0] y1 = grid1[:,1] x2 = grid2[:,0] y2 = grid2[:,1] X1 =", "+ nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out,", "torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1, ) X = torch.cat([grid_split, theta_split],", "torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph, with split and assemble", "def decode(self, x, sample_idx=None): if sample_idx is None: std = self.std + self.eps", "x in range(self.r): for y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub =", "= self.s # pred_ij = pred_i[idx : 
idx + nx * ny] out[x::self.r,", "self).__init__() self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y =", "None self.grid_sample = [] self.grid_sample_all = None self.edge_index = [] self.edge_index_down = []", "return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd", "def __init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm type are", "theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:])", "* self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:,", "2)], dim=1) # else: # X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X =", "::r_l, :] theta_l = theta_l.reshape(N, n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l)", "= sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <=", "parents are NN but they are not NearestNeighbor edge_index_inter = [] for x_i", "[] for x in range(self.r): for y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2)", "else: ny = self.s-1 else: nx = self.s ny = self.s # pred_ij", "= np.array(range(self.n)) self.grid_sample = self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m]", "list (batches) of list (time seq) assert len(pred) == len(split_idx) assert len(pred[0]) ==", "self.pairwise_difference(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = 
sklearn.metrics.pairwise_distances(grid, grid1) X_diff1,", "(self.std + self.eps) return x def decode(self, x, sample_idx=None): x = (x *", "boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size = len(boundary) vertice1", "index_end] else: idx = torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end", "2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1 /", "(2 ** l)) if grid == 'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l)", "for j, l in enumerate(self.layers): x = l(x) if j != self.n_layers -", "edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with", "= np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:,", "in range(n_x): i = y * n_x + x if (x != n_x", "out.reshape(-1,) # generate graph on Torus, with split and assemble class TorusGridSplitter(object): def", "== 'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif grid == 'grid_edge_aug':", "= self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d : 2 * self.d + self.attr_features]", "(x != n_x - 1): d = 1 / n_x a1 = a[x,", "edge_index_list = [] edge_index_list_cuda = [] level = int(np.log2(s) - 1) print(level) for", "square domain class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space)", "n]) X2 = np.tile(x2.reshape(1, n), [n, 1]) X_diff = X1 - X2 Y1", "r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, 
self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i)", "0, 0)) if (y != n_y - 1): edge_index.append((i, i + n_x)) edge_attr.append((0,", "2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + 1, i)) edge_attr.append((d, a2, a1,", "is None: for l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l", "= torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X, X_l], dim=1) X_global.append(X) # construct", "s = self.s n = self.n boundary1 = np.array(range(0, s)) boundary2 = np.array(range(n", "self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float)", "in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x): for j, l in enumerate(self.layers): x", "self.n grid, grid_all = self.get_grid() edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range,", "edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx] edge_attr = np.zeros((self.n_edges, 2 *", "len(split_idx) assert len(pred) == self.num * self.l // batch_size2 out = torch.zeros(self.n, )", "grid_size, l): data = data.reshape(-1, grid_size, grid_size) data = data[:, ::l, ::l] data", "if (y != n_y - 1): d = 1 / n_y edge_index.append((i, i", "in np.meshgrid(*grids)]).T self.splits = self.n // self.m # number of sub-grid if self.splits", "* self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 * self.d] =", "f=None, theta=None): # if self.edge_index_boundary == None: # self.boundary_connectivity2d() if f is None:", "torch.tensor(edge_attr, dtype=torch.float) # # generate two-level graph class RandomTwoMeshGenerator(object): def 
__init__(self, real_space, mesh_size,", "with split and assemble class TorusGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15,", "self.resolution = resolution self.n = resolution**d self.d = d self.m = m self.l", "theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the finest level, we construct", "x could be in shape of ntrain*n or ntrain*T*n or ntrain*n*T self.mean =", "n mean = self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape): std = self.std[sample_idx] +", "params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data),", "assert len(pred) == len(split_idx) assert len(pred[0]) == self.T assert len(pred) == self.r**2 //", "np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features]", "= [] for x in range(n_x): i = x i1 = (x+1)%n_x edge_index.append((i,", "# generate two-level graph class RandomTwoMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator,", "= sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape)", "super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std = torch.std(x) self.eps = eps def encode(self,", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape)", "multi-level graph, with split and assemble class RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size, level,", 
"= index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr =", "!= n_x - 1): d = 1 / n_x edge_index.append((i, i + 1))", "x = x.cuda() return x def set_cuda(self, to_cuda): self.to_cuda = to_cuda def set_torch(self,", "idx], dim=0).reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split", "= np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index = [] edge_attr = [] for", "self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X,", "X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a =", "return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample", "= self.boundary[::stride] boundary_size = len(boundary) vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2", "self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1]", "+ n_x, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) **", "/ 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) X = torch.tensor(grid, dtype=torch.float)", "= sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1)", "sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx def", "+ 1, i )) if a != None: a1 = a[x, y] a2", "= out / self.l # out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) # out", "pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd = 
np.min(PWD, axis=2) self.edge_index", "theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1] = theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if", "sigma=sigma, mode='constant', cval=0) # out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate multi-level", "= edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta): theta_d = theta.shape[1] theta", "np.vstack(np.where(pwd <= self.radius)) pwd_index = pwd_index[np.where(pwd <= self.radius)] PWD_index = (np.where(pwd <= self.radius)[0],", "theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d + 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\", "self.get_grid() edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr,", "dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X, X_l], dim=1) X_global.append(X) # construct edges index1", "generate graphs with sampling class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator,", "+ 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float),", "grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges,", "r_l = 2 ** (l - 1) s_l = s // r_l n_l", "m self.l = l self.radius = radius assert self.n % self.m == 0", "DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers) -", "= torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = 
torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1),", "self.std = self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization,", "nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x): for j, l", "out = torch.zeros(self.n, ) if cuda: out = out.cuda() for i in range(len(pred)):", "torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end if index0 < index_end:", "n_x + x if (x != n_x - 1): d = 1 /", "d = np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d,", "self.to_float = to_float # normalization, pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001):", "= np.argmin(PWD, axis=2) edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index = pwd_index[np.where(pwd <= self.radius)]", "read_field(self, field): x = self.data[field] if not self.old_mat: x = x[()] x =", "np.array(range(s, n, s)) boundary4 = np.array(range(2 * s - 1, n, s)) self.boundary", "= theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split", "0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr = np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0]", "if self.splits * self.m < self.n: self.splits = self.splits + 1 print('n:',self.n,' m:',self.m,", "Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]]", "edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) #", "a2, a1)) X = 
torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index =", "for l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index", "None: a1 = a[x, y] a2 = a[x + 1, y] edge_attr.append((x /", "self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x, y): return", ":self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:,", "def assembleT(self, pred, split_idx, batch_size2, sigma=1): # pred is a list (batches) of", "torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2],", "return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None): if", "xx in np.meshgrid(*grids)]).T self.splits = self.n // self.m # number of sub-grid if", "is_periodic: x_j = x_j % s_l if (x_j in range(s_l)): # if (xi,", "self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1] = theta[self.edge_index[1]] else: xy =", "len(data), X.shape, edge_index.shape, edge_attr.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1, cuda=False):", "edge_index_up_out = [] index = 0 for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l])", "self.num * self.l // batch_size2 out = torch.zeros(self.n, ) if cuda: out =", "theta = theta.reshape(self.resolution, self.resolution, theta_d) data = [] for x in range(self.r): for", "= [] for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def", "= self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape): 
std = self.std[sample_idx] + self.eps #", "i in range(self.l): perm = torch.randperm(self.n) perm = perm.reshape(self.num, self.m) for j in", "* ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n)", "* a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01)", "grid self.resolution = resolution self.n = resolution**d self.d = d self.m = m", "2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1] = theta[self.edge_index_boundary[1]] else:", "= PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return torch.tensor(edge_index,", "edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1 = index1 + num_nodes num_nodes += n_l", "num_nodes = 0 # build connected graph for l in range(depth): h_x_l =", "pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] if is_forward:", "= self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def", "x = l(x) return x class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False):", "= sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD, axis=2) self.edge_index =", "torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(-1,)", "assert len(pred[0]) == self.T assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.T,", "np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T", "= self.pairwise_difference(grid, grid1) grid2 = grid grid2[:, 1] = grid[:, 1] + 1", "out[idx] + pred_ij out 
= out / self.l # out = gaussian_filter(out, sigma=sigma,", "perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx,", "perm = perm.reshape(self.num, self.m) for j in range(self.num): idx = perm[j,:].reshape(-1,) grid_sample =", "= self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr =", "xs = np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index = [] edge_attr = []", "= np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self,", "* self.d)) edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d]", "dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split", "= torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate", "self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is not None: self.layers.append(out_nonlinearity())", "= self.perm[index: index_end] else: idx = torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index", "edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self, out_list, sample_idx_list, is_cuda=False): assert", "ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def", "def ball_connectivity(self, r): pwd = 
sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges =", "2))) edge_index.append((i + n_x, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)),", "dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf)", "grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split", "theta[self.idx] edge_attr = f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) #", "1), [1, n]) X2 = np.tile(x2.reshape(1, n), [n, 1]) X_diff = X1 -", "= perm[:index] self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out = []", "grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4 = grid grid4[:, 0] = grid[:,", "= self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx] edge_attr = np.zeros((self.n_edges, 2 * self.d", "x.view(s) return x def decode(self, x): s = x.size() x = x.view(s[0], -1)", "self.level assert len(radius_inter) == self.level - 1 self.edge_index = [] self.edge_index_down = []", "split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y):", "self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.splits = self.n", "def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None): if f is None:", "grid1[:,0] y1 = grid1[:,1] x2 = grid2[:,0] y2 = grid2[:,1] X1 = np.tile(x1.reshape(n,", "torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22, 
dtype=torch.float) # generate", "n_x, n_y, grid, params): edge_index_global = [] edge_attr_global = [] X_global = []", "not NearestNeighbor if abs(x)>=2: # if their parents are NN if abs(x_i//2 -", "x[()] x = np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) if self.to_float: x =", "self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, scaling by range class RangeNormalizer(object):", "* self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d : 2 * self.d", "self.pairwise_difference(grid, grid2) grid3 = grid grid3[:, :] = grid[:, :] + 1 pwd3", "level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_sizes self.level = level", "self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1", ":] = grid[:, :] + 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 =", "= self.get_grid() edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range()", "grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1,", "ball_connectivity(self, r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 =", "sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred, split_idx,", "self.n = resolution**2 self.m = m self.T = T self.radius = radius self.edge_features", "= edge_index.shape[1] edge_attr = np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2]", "- self.b)/self.a x = x.view(s) return x 
#loss function with rel/abs Lp loss", "normalization, scaling by range class RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__()", "edge_attr def grid_edge(n_x, n_y, a=None): if a != None: a = a.reshape(n_x, n_y)", "= torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2))", "xs = np.linspace(0.0, 1.0, s_l) grid_l = xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape)", "data def sample(self, theta, Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d)", "- 1): d = 1 / n_x edge_index.append((i, i + 1)) edge_index.append((i +", "index2.repeat(2, axis = 0).repeat(2, axis = 1) index2 = torch.tensor(index2).reshape(-1) index2 = index2", "range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape)", "self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self,", "2 * self.d + 1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d))", "range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up =", "batch_size2, sigma=1): assert len(pred) == len(split_idx) assert len(pred) == self.r**2 // batch_size2 out", ">= self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r): grid", "Y_diff = Y1 - Y2 return X_diff, Y_diff def torus_connectivity(self, grid): pwd0 =", "a2 = a[x + 1, y] edge_index.append((i, i + 
1)) edge_attr.append((d, a1, a2,", ": idx + nx * ny] out[t, x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny)", "2 * self.d +1] = theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is", "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reading data class MatReader(object): def", "def torus1d_connectivity(self, r): grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid", "self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges = self.edge_index.shape[1]", "* self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 * self.d] =", "= self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 + self.edge_features * 2)) a = theta_split[:,", "** (l - 1) s_l = s // r_l n_l = s_l print('level',s_l,r_l,n_l)", "[] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self, new_sample=True,", "* std) + mean return x def cuda(self): self.mean = self.mean.cuda() self.std =", "self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, Gaussian class", "= self.n self.idx = np.array(range(self.n)) self.grid_sample = self.grid def sample(self): perm = torch.randperm(self.n)", "self.s else: nx = self.s-1 if y==0: ny = self.s else: ny =", "!= None: a1 = a[x, y] a2 = a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1,", "= x_j % s_l # if (xj, yj) is a valid node if", "import numpy as np import scipy.io import h5py import sklearn.metrics from torch_geometric.data import", "if is_cuda: pred = torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n, ) for i", "self.idx = [] self.idx_all = None self.grid_sample = [] self.grid_sample_all = None self.edge_index", "= np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 
:4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:,", "1, n, s)) self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary", "as np import scipy.io import h5py import sklearn.metrics from torch_geometric.data import Data import", "r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead of randomly sample sub-grids, here", "-1) index_split = index_sub.reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance,", "2), np.exp(-(d / 0.01) ** 2))) if (y != n_y - 1): d", "randomly sample sub-grids, here we downsample sub-grids self.grid = grid.reshape(resolution, resolution,2) # self.theta", "x = self.a*x + self.b x = x.view(s) return x def decode(self, x):", "grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3 = grid grid3[:, :] = grid[:,", "edge_index.append((i, i1)) edge_index.append((i1, i )) i2 = (x + 2) % n_x edge_index.append((i,", "edge_attr_inner = grid(h_y_l, h_x_l) elif grid == 'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l,", "self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta =", "self.n_edges_inter = [] def sample(self): self.idx = [] self.grid_sample = [] perm =", "* ny] out[t, x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma,", "assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx) assert len(pred) == self.r**2", "axis=2) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def", "* self.d + 2)) edge_attr_down[:, 0:2 * self.d] = 
self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 *", "n), [n, 1]) Y_diff = Y1 - Y2 return X_diff, Y_diff def torus_connectivity(self,", "edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, )", "split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x, y =", "self.d:]) else: theta = theta[self.idx] edge_attr = f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]])", "0 self.d = d self.p = p self.reduction = reduction self.size_average = size_average", "= attr_features assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0]", "perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx]", "Y_difference def get_data(self, theta, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d)", "theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None: edge_attr =", "= l(x) return x class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin,", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X,", "edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X # if (is_high): # X = torch.cat([torch.zeros(n_l, l", "def abs(self, x, y): num_examples = x.size()[0] #Assume uniform mesh h = 1.0", "= self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d", "grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = 
torch.cat([Y_sub,", "edge_index.append((i, i + 1)) edge_index.append((i + 1, i )) if a != None:", "enumerate(self.layers): x = l(x) if j != self.n_layers - 1: x = torch.sin(x)", "/ n_x, a1, a2)) edge_attr.append((x / n_x, a2, a1)) X = torch.tensor(xs, dtype=torch.float)", "for i in range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred # generate graph, with", "2 * self.d + 2)) edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2", "= self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(GaussianNormalizer, self).__init__()", "= np.concatenate([boundary1, boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size =", "= Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape)", "dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius))", "torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference,", "np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long)", "self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m self.edge_index_21 = self.edge_index_12[[1,0],:]", "Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference,", "edge_attr[:, 3:3 + 
self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 + self.edge_features", "in range(n): x2 = grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2", "(new_sample) or (self.perm is None): self.perm = torch.randperm(self.n) index = index0 for l", "mean = self.mean[:,sample_idx] # x is in shape of batch*n or T*batch*n x", "attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.attr_features = attr_features assert", "else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data", "vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None, theta=None): #", "len(pred) == self.num * self.l // batch_size2 out = torch.zeros(self.n, ) if cuda:", "edge_attr[:, 2 * self.d +1] = theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta", "range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x): for j, l in enumerate(self.layers): x =", "np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) if self.to_float: x = x.astype(np.float32) if self.to_torch:", "torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s = self.s n = self.n boundary1 = np.array(range(0,", "n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] =", "a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,", "2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, 
edge_index.shape,", "dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None: data", "get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1] edge_attr = np.zeros((n_edges, 4)) edge_attr[:, 0:2] =", "shape of batch*n or T*batch*n x = (x * std) + mean return", "edge_attr_22[:, 2 * self.d + 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12,", "= self.s ny = self.s # pred_ij = pred_i[idx : idx + nx", "np.tile(x2.reshape(1, n), [n, 1]) X_diff = X1 - X2 Y1 = np.tile(y1.reshape(n, 1),", "edge_index_out = [] edge_index_down_out = [] edge_index_up_out = [] index = 0 for", "of sub-grid def get_data(self, theta, edge_features=1): data = [] for i in range(self.l):", "edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a,", "if l==1: edge_index_nn = [] for x_i in range(s_l): for x in (-1,1):", "x i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i )) i2 = (x +", "self.edge_index_up = [] self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] self.n_edges_inner", "range(n): x1 = grid[i1] for i2 in range(n): x2 = grid[i2] d =", "cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std", "= edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index", "self.idx_both def get_grid(self): return 
torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def", "len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y): theta_d = theta.shape[1]", "assemble with downsample class DownsampleGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, edge_features=1):", "= pred_i[idx : idx + nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx *", "X = torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0,", "self.n: self.m = self.n self.idx = np.array(range(self.n)) self.idx_i = self.idx self.idx_both = self.idx", "def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index", "n), [n, 1]) X_diff = X1 - X2 Y1 = np.tile(y1.reshape(n, 1), [1,", "0.01) ** 2))) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index", "return data def sampleT(self, theta, Y, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution,", "edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:,", "self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d],", "graph class RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d =", "1, i)) edge_attr.append((-1, 0, 0)) if (y != n_y - 1): edge_index.append((i, i", "super(UnitGaussianNormalizer, self).__init__() # x could be in shape of ntrain*n or ntrain*T*n 
or", "= self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF =", "__init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_sizes", "= sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3 = grid grid3[:, :]", "edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 *", "scaling by range class RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin", "xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self):", "torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug(n_x,", "if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.idx_i = self.idx", "i2 = (x + 2) % n_x edge_index.append((i, i2)) edge_index.append((i2, i )) if", "X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2 = grid grid2[:, 1] = grid[:, 1]", "= [] num_nodes = 0 # build connected graph for l in range(depth):", "1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *=", "theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N, n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape)", "self.n_layers >= 1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def", "sample_size self.m_i = induced_point assert len(mesh_size) == self.d if self.d == 1: self.n", 
"Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split", "range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity())", "theta[self.edge_index[0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3", "edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def assemble(self, pred,", "resolution if resolution%2==1: self.s = int(((resolution - 1)/r) + 1) else: self.s =", "np.tile(y1.reshape(n, 1), [1, n]) Y2 = np.tile(y2.reshape(1, n), [n, 1]) Y_diff = Y1", "edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False): grid_list = [] theta_list", "edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self,", "edge_index, edge_attr def grid_edge(n_x, n_y, a=None): if a != None: a = a.reshape(n_x,", "torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l in range(self.level): edge_index_range[l, 0] = n_edge_index", "assemble class RandomGridSplitter(object): def __init__(self, grid, resolution, d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__()", "self.sample(new_sample=True, index0=index) else: idx, idx_all = self.sample(new_sample=False, index0=index) index = (index + self.m)", "radius_inter, theta_a, theta_all): # give a test mesh, generate a list of data", "Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1): assert", "data = data.reshape(-1, grid_size, grid_size) data = data[:, ::l, ::l] data = data.reshape(-1,", 
"print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l) grid_l = xs grid_l = torch.tensor(grid_l, dtype=torch.float)", "file_path self.data = None self.old_mat = None self._load_file() def _load_file(self): try: self.data =", "theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges,", "return data def assemble(self, pred, split_idx, batch_size2, sigma=1, cuda=False): assert len(pred) == len(split_idx)", "return out.reshape(self.T,self.n) def downsample(data, grid_size, l): data = data.reshape(-1, grid_size, grid_size) data =", "= np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma):", "grid, grid_all = self.get_grid() edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range", "torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def", "if (x != n_x - 1): d = 1 / n_x a1 =", "= Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) edge_index,", "edge_attr = torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create multi_grid", "if (x_j in range(s_l)): # if (xi, yi), (xj, yj) not NearestNeighbor if", "x class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers =", "= index + self.grid_sample[l].shape[0] index = 0 for l in range(self.level-1): pwd =", "real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in 
np.meshgrid(*grids)]).T self.splits", "index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all] return self.idx,", "return torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms def rel(self, x, y): num_examples =", "def attributes(self, theta=None): if theta is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 =", "+ 1)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2),", "radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index", "grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2 = a[i2] edge_index.append((i1, i2))", "+ self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float)", "self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m -", "Y_sub.shape[1] if self.m >= n_sub: m = self.m - n_sub perm = torch.randperm(self.n)", "edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index):", "\\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None): if theta is None:", "in order to use graph network's data structure, # the edge index shall", "0 for l in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index +", "l * 2), X, torch.zeros(n_l, (depth - 1 - l) * 2)], dim=1)", "== self.level - 1 self.edge_index = [] 
self.edge_index_down = [] self.edge_index_up = []", "- 1) print(level) for l in range(1, level+1): r_l = 2 ** (l", "in range(s_l): for x in (-1,1): x_j = x_i + x if is_periodic:", "self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self,", "edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2),", "return data def assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list) == self.splits if is_cuda:", "perm = torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx = perm[:m] grid_sample =", "normalization, Gaussian class GaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x)", "[] X_global = [] num_nodes = 0 # build connected graph for l", "= np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self,", "X = torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index", "sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d = len(real_space) self.ms = sample_sizes self.m = sample_sizes[0] self.level", "len(real_space) self.ms = sample_sizes self.m = sample_sizes[0] self.level = level assert len(sample_sizes) ==", "* n_y), dtype=torch.long) # print('create multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return", "here we downsample sub-grids self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1)", "range(s_l): for x in (-1,1): x_j = x_i + x if is_periodic: x_j", "= self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None, 
theta=None): # if self.edge_index_boundary ==", "+ 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is", "self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd =", "None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return", "= self.s-1 else: nx = self.s ny = self.s # pred_ij = pred_i[idx", "theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split =", "a[x, y] a2 = a[x, y+1] edge_index.append((i, i + n_x)) edge_attr.append((d, a1, a2,", "f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def", "s, theta_d) theta_l = theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N, n_l, theta_d) theta_l", "i==0: idx, idx_all = self.sample(new_sample=True, index0=index) else: idx, idx_all = self.sample(new_sample=False, index0=index) index", "theta is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]],", "decode(self, x, sample_idx=None): if sample_idx is None: std = self.std + self.eps #", "def load_file(self, file_path): self.file_path = file_path self._load_file() def read_field(self, field): x = self.data[field]", "* 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None:", "self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def 
torus1d_connectivity(self, r): grid =", "self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub,", "y] a2 = a[x + 1, y] edge_index.append((i, i + 1)) edge_attr.append((d, a1,", "np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l],", "= np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d]", "torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2, sigma=1): # pred is", "edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then compute the interactive neighbors -- their", "super(DenseNet_sin, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1 self.layers =", "= x.view(s[0], -1) x = self.a*x + self.b x = x.view(s) return x", "2 * self.d +1] = theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is", "else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d] =", "x_i in range(s_l): for x in range(-3,4): x_j = x_i + x #", "[] if (new_sample) or (self.perm is None): self.perm = torch.randperm(self.n) index = index0", "self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None): if f is None: if", "grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr =", "axis=2) pwd_index = np.argmin(PWD, 
axis=2) edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index = pwd_index[np.where(pwd", "resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution =", "= -self.a*mymax + high def encode(self, x): s = x.size() x = x.view(s[0],", "= theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0, self.r,", "= int(np.log2(s) - 1) print(level) for l in range(1, level+1): r_l = 2", "else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split =", "self.data = scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data = h5py.File(self.file_path) self.old_mat = False", "mode='constant', cval=0) # out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate multi-level graph,", "+ 1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2 *", "= [] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self,", "len(split_idx) assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in", "= self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m", "** 2))) if (y != n_y - 1): d = 1 / n_y", "edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index =", "# in order to use graph network's data structure, # the edge index", "batch*n or T*batch*n x = (x * std) + mean return x def", "1, -1, -1)) if self.to_float: x = x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x)", "index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all] return self.idx, 
self.idx_all def get_grid(self): grid_out", "theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr =", "data def sampleT(self, theta, Y, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution,", "# build connected graph for l in range(depth): h_x_l = n_x // (2", "np import scipy.io import h5py import sklearn.metrics from torch_geometric.data import Data import torch.nn", "return x def decode(self, x, sample_idx=None): if sample_idx is None: std = self.std", "index edge_index[1, :] = edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:],", "grid) X_diff0, Y_diff0 = self.pairwise_difference(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1", "dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12", "x_j = x_j % s_l if (x_j in range(s_l)): # if (xi, yi),", "* self.d + self.attr_features: 2 * self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else:", "self.data[field] if not self.old_mat: x = x[()] x = np.transpose(x, axes=range(len(x.shape) - 1,", "= induced_point assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0]", "np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m", "real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d = len(real_space) self.ms = sample_sizes self.m", "# pred_ij = pred_i[idx : idx + nx * ny] out[t, x::self.r, y::self.r]", "np.concatenate([boundary1, 
boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size = len(boundary)", "# instead of randomly sample sub-grids, here we downsample sub-grids self.grid = grid.reshape(resolution,", "theta=None): if theta is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1))", "mymin = torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a = (high - low)/(mymax", "grid_all = self.get_grid() edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range =", "= np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None): if", "+ 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level - 1): edge_attr_down", "= theta_sub Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1)", "x/n_x, a2, a1)) if (y != n_y - 1): d = 1 /", "edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]]", "/ self.l # out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) # out = torch.tensor(out,", "= np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d", "perm[:index] self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out = [] for", "= sample_sizes self.level = level assert len(sample_sizes) == level assert len(mesh_size) == self.d", "= self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out = [] for grid in", "f is None: if 
theta is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary =", "range(self.splits): if i==0: idx, idx_all = self.sample(new_sample=True, index0=index) else: idx, idx_all = self.sample(new_sample=False,", "for l in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d +", "y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out,", "order to use graph network's data structure, # the edge index shall be", "index_end = (index+self.ms[l]) % self.n if index < index_end: idx = self.perm[index: index_end]", "n_edges = edge_index.shape[1] edge_attr = np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:,", "torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12, r22):", "__init__(self, grid, resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead of randomly", "for x_i in range(s_l): for x in (-1,1): x_j = x_i + x", "np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx", "self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long)", "out / self.l # out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) # out =", "1]) X_diff = X1 - X2 Y1 = np.tile(y1.reshape(n, 1), [1, n]) Y2", "pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] + pred_ij out = out /", "> 0 self.d = d self.p = p self.reduction = reduction self.size_average =", "grid_sample],dim=0) theta_split = torch.cat([theta_sub, 
theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference", "y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,)", "= np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid", "Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape)", "= Y1 - Y2 return X_diff, Y_diff def torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid,", "edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global,", "+ x # if (xj, yj) is a valid node if is_periodic: x_j", "= n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1]", "we construct the nearest neighbors (NN) if l==1: edge_index_nn = [] for x_i", "edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X = torch.tensor(grid, dtype=torch.float) #", "-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d + 1]", "edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d /", "= x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p,", "torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): #", "X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def sampleT(self, theta, Y, params=None): 
theta_d", "self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d : 2 * self.d + self.attr_features] =", "[] edge_attr = [] for y in range(n_y): for x in range(n_x): i", "if is_periodic: x_j = x_j % s_l # if (xj, yj) is a", "xy[:, self.d:]) else: theta = theta[self.idx] edge_attr = f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]],", "range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in", "# X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X, X_l], dim=1) X_global.append(X)", "a[i1] a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 *", "def read_field(self, field): x = self.data[field] if not self.old_mat: x = x[()] x", "edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list) == self.splits", "edge_index.append((i1, i )) i2 = (x + 2) % n_x edge_index.append((i, i2)) edge_index.append((i2,", "if self.to_cuda: x = x.cuda() return x def set_cuda(self, to_cuda): self.to_cuda = to_cuda", "- 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else:", "def encode(self, x): x = (x - self.mean) / (self.std + self.eps) return", "None self._load_file() def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data", "!= n_y - 1): edge_index.append((i, i + n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i +", "NearestNeighbor edge_index_inter = [] for x_i in range(s_l): for x in range(-3,4): x_j", "and label the range of each level edge_index_range = 
torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range =", "2 * self.d + self.attr_features: 2 * self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features)", "= theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l],", "xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l", "= a[x, y] a2 = a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x,", "edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1))", "self.idx self.idx_both = self.idx self.grid_sample = self.grid self.grid_sample_i = self.grid self.grid_sample_both = self.grid", "2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 *", "2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up", "ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1]", "np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.splits = self.n // self.m # number of", "edge_index_range[l, 1] = n_edge_index n_edge_index = 0 for l in range(self.level-1): edge_index_down_range[l, 0]", "= self.grid self.grid_sample_both = self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m]", "sigma=1): assert len(pred) == len(split_idx) assert len(pred) == self.r**2 // batch_size2 out =", "[] for x_i in range(s_l): for x in (-1,1): x_j = x_i +", "abs(x)>=2: # if 
their parents are NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j])", "x = torch.from_numpy(x) if self.to_cuda: x = x.cuda() return x def set_cuda(self, to_cuda):", "h_x_l, a) elif grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a)", "# A simple feedforward neural network class DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None,", "= self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i = perm[self.m:", "[n, 1]) Y_diff = Y1 - Y2 return X_diff, Y_diff def torus_connectivity(self, grid):", "edge_attr[:, 3 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr =", "= self.edge_index_12[1,:] + self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) +", "in range(-3,4): x_j = x_i + x # if (xj, yj) is a", "/ n_x a1 = a[x, y] a2 = a[x + 1, y] edge_index.append((i,", "return data def simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0,", "def pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1 = grid1[:,0] y1 = grid1[:,1] x2", "Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2) edge_index =", "theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2", "<= self.radius)] PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance =", "if len(self.mean.shape) == len(sample_idx[0].shape): std = self.std[sample_idx] + self.eps # batch*n mean =", "if j != self.n_layers - 1: x = torch.sin(x) return x # generate", "2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub =", "pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = 
self.pairwise_difference(grid, grid) grid1 = grid grid1[:,0]", "= theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N, n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float)", "eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x could be in shape of ntrain*n or ntrain*T*n", "return x # generate graphs on square domain class SquareMeshGenerator(object): def __init__(self, real_space,", "a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr,", "file_path): self.file_path = file_path self._load_file() def read_field(self, field): x = self.data[field] if not", "= pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out = torch.tensor(out,", "= theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22,", "Y_diff1 = self.pairwise_difference(grid, grid1) grid2 = grid grid2[:, 1] = grid[:, 1] +", "** 2))) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index =", "# x is in shape of batch*n or T*batch*n x = (x *", "self.grid_sample_i = self.grid self.grid_sample_both = self.grid def sample(self): perm = torch.randperm(self.n) self.idx =", "3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 + self.edge_features *", "'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) # update index edge_index_inner =", "* 2)], dim=1) # else: # X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X", "self.perm = torch.randperm(self.n) index = index0 for l in range(self.level): index = index", "= [] theta_list = [] edge_index_list = [] edge_index_list_cuda = [] level =", "# if 
self.edge_index_boundary == None: # self.boundary_connectivity2d() if f is None: if theta", "split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j] if", "= np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]] edge_attr[:,", "= torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx]", "= grid grid3[:, :] = grid[:, :] + 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3)", "is None: edge_attr = f(xy[:, 0:self.d], xy[:, self.d:]) else: theta = theta[self.idx] edge_attr", "index + self.grid_sample[l].shape[0] index = 0 for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l],", "== 0 self.num = self.n // self.m # number of sub-grid def get_data(self,", "n_edges def get_data(self, theta): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data", "out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i =", "2) if params==None: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data", "edge_attr_global = [] X_global = [] num_nodes = 0 # build connected graph", "idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split =", "pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3 = grid grid3[:,", "= np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1,", "X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) # update index edge_index_inner = edge_index_inner", "* self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] 
edge_attr[:, 2", "n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] =", "grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T", "Y_diff4], axis=2) pwd = np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2) edge_index = np.vstack(np.where(pwd", "axis=2) edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index = pwd_index[np.where(pwd <= self.radius)] PWD_index =", "if(d<=r): a1 = a[i1] a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1", "= [] if theta is None: for l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l],", "dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub", "4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1,", "np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index = [] edge_attr = [] for x", "n_x // (2 ** l) h_y_l = n_y // (2 ** l) n_l", "// batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m)", "torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample =", "NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter =", "sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def", "- 1): d = 1 / n_y a1 = a[x, y] a2 =", "= grid2[:,0] y2 = 
grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1, n]) X2 =", "if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1]", "== self.level assert len(radius_inter) == self.level - 1 self.edge_index = [] self.edge_index_down =", "torch.repeat is different from numpy index2 = index2.repeat(2, axis = 0).repeat(2, axis =", "class DownsampleGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() #", "n_x)) edge_index.append((i + n_x, i)) if a != None: a1 = a[x, y]", "edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out #", "torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None, theta=None): # if self.edge_index_boundary == None: # self.boundary_connectivity2d()", "if a != None: a1 = a[x] a2 = a[x + 1] edge_attr.append((x", "= torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split =", "self.resolution, self.resolution) x = torch.randint(0, self.r, (1,)) y = torch.randint(0, self.r, (1,)) grid_sub", "self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx", "xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate", ") edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2]", "grid1.shape[0] x1 = grid1[:,0] y1 = grid1[:,1] x2 = grid2[:,0] y2 = grid2[:,1]", "= torch.cat([index_sub, idx], dim=0).reshape(-1, ) X = 
torch.cat([grid_split, theta_split], dim=1) else: grid_split =", "if self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1))", "edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]]", "s)) self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride]", "self.n // self.m # number of sub-grid if self.splits * self.m < self.n:", "self.idx_both = self.idx self.grid_sample = self.grid self.grid_sample_i = self.grid self.grid_sample_both = self.grid def", "def multi_grid(depth, n_x, n_y, grid, params): edge_index_global = [] edge_attr_global = [] X_global", "self.d + 2)) edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d))", "rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return", "j != self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is not", "structure, # the edge index shall be stored as tensor instead of list", "theta_split = theta_sub Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split],", "return all_norms def rel(self, x, y): num_examples = x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) -", "= np.array(range(0, s)) boundary2 = np.array(range(n - s, n)) boundary3 = np.array(range(s, n,", "dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index,", "pwd1 = sklearn.metrics.pairwise_distances(grid, 
grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2 = grid grid2[:,", "0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]] edge_attr[:, 3] = theta[edge_index[1]] return", "def rel(self, x, y): num_examples = x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p,", "pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x,", "if f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr", "= self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both] edge_attr =", "= torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a, theta_all):", "neural network class DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers", "= pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float)", "self.to_cuda = to_cuda def set_torch(self, to_torch): self.to_torch = to_torch def set_float(self, to_float): self.to_float", "pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m) for j in range(batch_size2): pred_ij", "mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m > self.n: self.m", "x = x[()] x = np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) if self.to_float:", "f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr =", "self.radius = radius assert self.n % self.m == 0 self.num = self.n //", "return pred # 
generate graph, with split and assemble with downsample class DownsampleGridSplitter(object):", "self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end if index0 < index_end: idx_all = self.perm[index0: index_end]", "edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then compute the interactive neighbors -- their parents", "dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out,", "= edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference def get_data(self, theta, params=None):", "perm = torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both = perm[:", "idx_all = self.sample(new_sample=False, index0=index) index = (index + self.m) % self.n grid, grid_all", "1): edge_index.append((i, i + 1)) edge_attr.append((1, 0, 0)) edge_index.append((i + 1, i)) edge_attr.append((-1,", "2))) if (y != n_y - 1): d = 1 / n_y a1", "xx in np.meshgrid(*grids)]).T def ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <=", "* s - 1, n, s)) self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4]) def", "pred = torch.zeros(self.n, ) for i in range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred", "x = np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) if self.to_float: x = x.astype(np.float32)", "edge_attr.shape, index_split.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred) ==", "a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y != n_y - 1): d =", "theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d + 1] = 
theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3", "n_edge_index = 0 for l in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index =", ":].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub", "= None self.old_mat = None self._load_file() def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat", "forward(self, x): for j, l in enumerate(self.layers): x = l(x) if j !=", "pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s else:", "torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms def rel(self, x, y): num_examples = x.size()[0]", "edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up", "index_end: idx = self.perm[index: index_end] else: idx = torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx)", "1)) edge_index.append((i + 1, i )) if a != None: a1 = a[x,", "0 # build connected graph for l in range(depth): h_x_l = n_x //", "uniform mesh h = 1.0 / (x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) -", "for i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j", "2) return data def simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0, n_x) ys =", "= self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0)", "return x def set_cuda(self, to_cuda): self.to_cuda = to_cuda def set_torch(self, to_torch): self.to_torch =", "mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx = [] self.idx_all =", "[] edge_attr_global = [] X_global = [] 
num_nodes = 0 # build connected", "xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with sampling class RandomMeshGenerator(object):", "i + 1)) edge_attr.append((1, 0, 0)) edge_index.append((i + 1, i)) edge_attr.append((-1, 0, 0))", "split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self,", "x #loss function with rel/abs Lp loss class LpLoss(object): def __init__(self, d=2, p=2,", "self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid,", "set_float(self, to_float): self.to_float = to_float # normalization, pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self,", "x, sample_idx=None): x = (x * (self.std + self.eps)) + self.mean return x", "' number of splits:', self.splits ) self.perm = None self.idx = [] self.idx_all", "grid4[:, 0] = grid[:, 0] + 1 grid4[:, 1] = grid[:, 1] -", "print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:, ::r_l, :] theta_l", "= self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub =", "X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y): theta_d = theta.shape[1] theta", "self.m = m self.l = l self.radius = radius assert self.n % self.m", "m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid = grid self.resolution = resolution self.n =", "l in enumerate(self.layers): x = l(x) return x class DenseNet_sin(torch.nn.Module): def __init__(self, layers,", "mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T def ball_connectivity(self, r): 
pwd =", "ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index =", "= (x * std) + mean return x def cuda(self): self.mean = self.mean.cuda()", "if self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x, y):", "= grid(h_y_l, h_x_l) elif grid == 'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l,", "# construct X # if (is_high): # X = torch.cat([torch.zeros(n_l, l * 2),", "np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD, axis=2) pwd_index = np.argmin(PWD,", "out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1", "def get_data(self, theta, edge_features=1): data = [] for i in range(self.l): perm =", "= self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d", "in range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if x==0:", "+ 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down,", "assert len(pred) == len(split_idx) assert len(pred) == self.num * self.l // batch_size2 out", "= [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self): self.idx = []", "[] for x in range(n_x): i = x i1 = (x+1)%n_x edge_index.append((i, i1))", "postive assert d > 0 and p > 0 self.d = d self.p", "r): grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0] =", "= torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges =", "if theta is None: edge_attr = 
self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx] edge_attr", "grid[:, 1] - 1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4)", "grid_out = [] for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float)", "self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape)", "np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d +", "edge_attr.append((x / n_x, a2, a1)) X = torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact,", "= torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1 = grid1[:,0]", "= [] for x_i in range(s_l): for x in range(-3,4): x_j = x_i", "n_l = s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l) grid_l = xs grid_l", "sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def", "n_y): xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs", "np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary =", "self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1]", "* self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] =", "s - 1, n, s)) self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4]) def 
boundary_connectivity2d(self,", "dtype=torch.float) return X, edge_index, edge_attr def multi_grid(depth, n_x, n_y, grid, params): edge_index_global =", "* self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 * self.d] =", "= self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1]", "2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub =", "== level assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0]", "% self.n index_end = (index+self.ms[l]) % self.n if index < index_end: idx =", "self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1] = theta[self.edge_index_boundary[1]] else: xy =", "= self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1", "4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 +", "= sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = self.pairwise_difference(grid, grid) grid1 = grid grid1[:,0] =", "/ n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y != n_y -", "x, sample_idx=None): if sample_idx is None: std = self.std + self.eps # n", "# give a test mesh, generate a list of data data = []", "return diff_norms/y_norms def __call__(self, x, y): return self.rel(x, y) # A simple feedforward", "self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T def ball_connectivity(self, r):", "= int(((resolution - 1)/r) + 1) else: self.s = int(resolution/r) self.r = r", "self.T = T self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution,", "2), X, torch.zeros(n_l, (depth - 1 - l) * 2)], dim=1) # else:", 
"self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms)", "= sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:]", "* self.d +1] = theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None:", "n)) boundary3 = np.array(range(s, n, s)) boundary4 = np.array(range(2 * s - 1,", "self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X", "get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index =", "= self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >= n_sub: m = self.m", "radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead of randomly sample sub-grids, here we downsample", "= torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return", "= 1 / n_x a1 = a[x, y] a2 = a[x + 1,", "0.01) ** 2))) edge_index.append((i + n_x, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1", "edge_attr_global.append(edge_attr_inner) # construct X # if (is_high): # X = torch.cat([torch.zeros(n_l, l *", "Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split,", "pred_ij = pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] + pred_ij out =", "self.m) % self.n grid, grid_all = self.get_grid() edge_index, edge_index_down, 
edge_index_up = self.ball_connectivity(radius_inner, radius_inter)", "__init__(self, grid, resolution, r, m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid =", "is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return", "grid2[:,0] y2 = grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1, n]) X2 = np.tile(x2.reshape(1,", "!= n_y - 1): d = 1 / n_y a1 = a[x, y]", "return self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r, is_forward=False): pwd =", "T*batch*n x = (x * std) + mean return x def cuda(self): self.mean", "in range(s_l): for x in range(-3,4): x_j = x_i + x # if", "else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else:", "= [] for i in range(self.l): perm = torch.randperm(self.n) perm = perm.reshape(self.num, self.m)", "= Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference def", "of sub-grid if self.splits * self.m < self.n: self.splits = self.splits + 1", "self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta", "grid_size) data = data[:, ::l, ::l] data = data.reshape(-1, (grid_size // l) **", "2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + n_x, i)) edge_attr.append((d, a2, a1,", "X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 + self.edge_features * 2)) a", "0)[0].view(-1) self.a = (high - low)/(mymax - mymin) self.b = -self.a*mymax + high", "generate graphs on square domain class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): 
super(SquareMeshGenerator, self).__init__()", "y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx,", "Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else:", "x==0: nx = self.s else: nx = self.s-1 if y==0: ny = self.s", "for xx in np.meshgrid(*grids)]).T def ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd", "(x * (self.std + self.eps)) + self.mean return x def cuda(self): self.mean =", "self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None):", "r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid)", "1)/r) + 1) else: self.s = int(resolution/r) self.r = r self.n = resolution**2", "i = x i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i )) i2 =", "!= n_x - 1): d = 1 / n_x a1 = a[x, y]", "0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # # generate two-level graph", "= [] self.grid_sample = [] if (new_sample) or (self.perm is None): self.perm =", "self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1 self.layers = nn.ModuleList()", "normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1 self.layers", "self.idx_i = self.idx self.idx_both = self.idx self.grid_sample = self.grid self.grid_sample_i = self.grid self.grid_sample_both", "= self.n // self.m # number of sub-grid def get_data(self, theta, edge_features=1): data", 
"edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1))", "= Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape,", "index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if self.m >= n_sub: m", "+ x if (x != n_x - 1): d = 1 / n_x", "y+1] edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X = torch.tensor(grid, dtype=torch.float)", "edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def", "data.reshape(-1, grid_size, grid_size) data = data[:, ::l, ::l] data = data.reshape(-1, (grid_size //", "boundary1 = np.array(range(0, s)) boundary2 = np.array(range(n - s, n)) boundary3 = np.array(range(s,", "2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None:", "= np.array(range(self.n)) self.idx_i = self.idx self.idx_both = self.idx self.grid_sample = self.grid self.grid_sample_i =", "1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge(n_x, n_y, a=None):", "torch.std(x, 0) self.eps = eps def encode(self, x): x = (x - self.mean)", "edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2", "grid grid2[:, 1] = grid[:, 1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2,", "s)) boundary2 = np.array(range(n - s, n)) boundary3 = np.array(range(s, n, s)) boundary4", "X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3 = grid grid3[:, :] = grid[:, :]", "a1)) if (y != n_y 
- 1): d = 1 / n_y edge_index.append((i,", "theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1: self.s", "data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X, y=Y_split,", "= self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps # T*batch*n mean", "0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a = (high - low)/(mymax - mymin) self.b", "self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float),", "T self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def", "edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference def get_data(self, theta, params=None): theta_d", "0.01) ** 2))) if (y != n_y - 1): d = 1 /", "pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij =", "len(real_space) self.s = mesh_size[0] assert len(mesh_size) == self.d if self.d == 1: self.n", "for y in range(n_y): for x in range(n_x): i = y * n_x", "split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] + pred_ij out = out / self.l # out", "self.mean return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self):", "torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level assert len(radius_inter) ==", "i )) i2 = (x + 2) % n_x edge_index.append((i, i2)) edge_index.append((i2, i", "for l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) 
index = index+self.m[l] self.idx_all = perm[:index]", "if (xi, yi), (xj, yj) not NearestNeighbor if abs(x)>=2: # if their parents", "* ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant',", "= xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d)", "edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1] =", "edge_attr_boundary[:, 2 * self.d +1] = theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta", "radius assert self.n % self.m == 0 self.num = self.n // self.m #", "self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range", "0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + n_x, i)) edge_attr.append((d,", ":2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr", "np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for i1 in range(n): x1", "dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1 = grid1[:,0] y1 =", "idx_all self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out = [] for", "** 2) return data def simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0, n_x) ys", "self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 *", "None: # self.boundary_connectivity2d() if f is None: if theta is None: edge_attr_boundary =", "in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in 
range(batch_size2):", "a2 = a[x + 1] edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x / n_x,", "* ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap')", "# if their parents are NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter", "a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) **", "# number of sub-grid def get_data(self, theta, edge_features=1): data = [] for i", "x_i in range(s_l): for x in (-1,1): x_j = x_i + x if", "self.file_path = file_path self.data = None self.old_mat = None self._load_file() def _load_file(self): try:", "n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down =", "= sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] edge_index = torch.tensor(edge_index,", "n_y, a=None): if a != None: a = a.reshape(n_x, n_y) xs = np.linspace(0.0,", "pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :]", "- 1 - l) * 2)], dim=1) # else: # X_l = torch.tensor(l,", ") n_sub = Y_sub.shape[1] if self.m >= n_sub: m = self.m - n_sub", "(x != n_x - 1): d = 1 / n_x edge_index.append((i, i +", "- 1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4) PWD =", "self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l in range(self.level): edge_attr", "2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1] = theta[self.edge_index_up[l][1]]", "torch.zeros((self.level,2), dtype=torch.long) 
edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0", "(l - 1) s_l = s // r_l n_l = s_l print('level',s_l,r_l,n_l) xs", "l)) if grid == 'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif grid", "torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1 = grid1[:,0] y1", "edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :] = edge_index[1, :] +", "ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out", "self.edge_attr_down = [] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = [] def", "self.m = self.n self.idx = np.array(range(self.n)) self.idx_i = self.idx self.idx_both = self.idx self.grid_sample", "edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index = 0 for l", "None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:,", "1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index", "abs(self, x, y): num_examples = x.size()[0] #Assume uniform mesh h = 1.0 /", "self.b x = x.view(s) return x def decode(self, x): s = x.size() x", "= grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr = np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0] edge_attr[:, :2*self.d]", "edge_index_list.append(edge_index_inter) 
edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta,", "edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1", "mesh_size[0] assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid", "+ 1 print('n:',self.n,' m:',self.m, ' number of splits:', self.splits ) self.perm = None", "of splits:', self.splits ) self.perm = None self.idx = [] self.idx_all = None", "= r self.n = resolution**2 self.m = m self.T = T self.radius =", "y] edge_attr.append((x / n_x, y / n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1))", "grid1) grid2 = grid grid2[:, 1] = grid[:, 1] + 1 pwd2 =", "torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug_full(n_x, n_y, r, a): n =", "theta = theta[self.idx] edge_attr = f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr,", "= torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2, sigma=1): # pred", "numpy as np import scipy.io import h5py import sklearn.metrics from torch_geometric.data import Data", "int(resolution/r) self.r = r self.n = resolution**2 self.m = m self.T = T", "-1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub,", "= torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1,", "torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) 
return", "+ nx * ny] out[t, x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out =", "1] = theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2 * self.d]", "np.meshgrid(*grids)]).T self.idx = [] self.idx_all = None self.grid_sample = [] self.grid_sample_all = None", "index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference", "edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s,", "X1 = np.tile(x1.reshape(n, 1), [1, n]) X2 = np.tile(x2.reshape(1, n), [n, 1]) X_diff", "T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution,", "= np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :] = edge_index[1, :] + self.grid_sample[l].shape[0]", "theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out =", "n_y) xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs", "except: self.data = h5py.File(self.file_path) self.old_mat = False def load_file(self, file_path): self.file_path = file_path", "= T self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution)", "a): n = n_x * n_y xs = np.linspace(0.0, 1.0, n_x) ys =", "torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the finest level, we construct the nearest", "1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = 
torch.cat(self.edge_attr_down, dim=0)", "(np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index]", "for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for i1", "torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train',", "index = index_end if index0 < index_end: idx_all = self.perm[index0: index_end] else: idx_all", "x): for _, l in enumerate(self.layers): x = l(x) return x class DenseNet_sin(torch.nn.Module):", "= [] for i1 in range(n): x1 = grid[i1] for i2 in range(n):", "torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference def get_data(self, theta, params=None): theta_d = theta.shape[1]", ">= n_sub: m = self.m - n_sub perm = torch.randperm(self.n) idx = perm[:m]", "- Y2 return X_diff, Y_diff def torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0,", "torch_geometric.data import Data import torch.nn as nn from scipy.ndimage import gaussian_filter ################################################# #", "a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx))", "== len(split_idx) assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i", "-1) edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4 +", "edge_attr = np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]]", "grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd", "a = a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x) # xs = 
np.array(range(n_x)) #", "len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0],", "self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd", "to_float): self.to_float = to_float # normalization, pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self, x,", "1] = n_edge_index n_edge_index = 0 for l in range(self.level-1): edge_index_down_range[l, 0] =", "theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0, self.r, (1,))", "torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None): if theta is None: edge_attr", "= perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd =", "edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 *", "axis = 0).repeat(2, axis = 1) index2 = torch.tensor(index2).reshape(-1) index2 = index2 +", "= (x * (self.std + self.eps)) + self.mean return x def cuda(self): self.mean", "self.d)) edge_attr_12[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d] =", "data = [] for i in range(self.l): perm = torch.randperm(self.n) perm = perm.reshape(self.num,", "n_x, a1, a2)) edge_attr.append((x / n_x, a2, a1)) X = torch.tensor(xs, dtype=torch.float) #", "self.splits * self.m < self.n: self.splits = self.splits + 1 print('n:',self.n,' m:',self.m, '", "= np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) 
edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:,", "for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel()", "= sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4 = grid grid4[:, 0]", "dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then compute the", "radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x =", "torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a, theta_all): #", "print(theta_l.shape) theta_list.append(theta_l) # for the finest level, we construct the nearest neighbors (NN)", "theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d + self.attr_features: 2 * self.d + 2*self.attr_features]", "grid4[:, 1] = grid[:, 1] - 1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4", "edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data", "== len(sample_idx[0].shape): std = self.std[sample_idx] + self.eps # batch*n mean = self.mean[sample_idx] if", "= torch.max(x, 0)[0].view(-1) self.a = (high - low)/(mymax - mymin) self.b = -self.a*mymax", "mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.splits =", "y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx = perm[:m] grid_sample", "self.splits = 
self.splits + 1 print('n:',self.n,' m:',self.m, ' number of splits:', self.splits )", "theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0)", "y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape,", "of each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range =", "data = data[:, ::l, ::l] data = data.reshape(-1, (grid_size // l) ** 2)", "self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d],", "= a[x, y] a2 = a[x, y+1] edge_index.append((i, i + n_x)) edge_attr.append((d, a1,", "self.radius)) n_edges = edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long) if edge_features == 0: edge_attr", "= Y_sub.shape[0] if self.m >= n_sub: m = self.m - n_sub perm =", "self.idx = [] self.grid_sample = [] perm = torch.randperm(self.n) index = 0 for", "= index1 + num_nodes num_nodes += n_l # #construct inter-graph edge if l", "sub-grid if self.splits * self.m < self.n: self.splits = self.splits + 1 print('n:',self.n,'", "= idx_all self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out = []", "theta_l.reshape(N, n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the finest", "n = self.n boundary1 = np.array(range(0, s)) boundary2 = np.array(range(n - s, n))", "torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data", "grid_sample],dim=0) theta_split = torch.cat([theta_sub, 
theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr", "self.radius)) pwd_index = pwd_index[np.where(pwd <= self.radius)] PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd <=", "2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[1]]", "= torch.tensor(edge_index, dtype=torch.long) if edge_features == 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr", "edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def", "dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split =", "a2)) edge_attr.append((x / n_x, a2, a1)) X = torch.tensor(xs, dtype=torch.float) # Exact =", "/ 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + 1, i))", "== 'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif grid == 'grid_edge': X,", "[] self.idx_all = None self.grid_sample = [] self.grid_sample_all = None self.edge_index = []", "* self.d + 1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:,", "\\ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph class RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size,", "grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD, axis=2)", "None self.idx = [] self.idx_all = None self.grid_sample = [] self.grid_sample_all = None", "radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution, resolution,2) # self.theta =", "* 2), X, torch.zeros(n_l, (depth - 1 - l) * 2)], dim=1) #", "# for the finest level, we construct the nearest 
neighbors (NN) if l==1:", "l) * 2)], dim=1) # else: # X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1) #", "theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub,", "= x_i + x if is_periodic: x_j = x_j % s_l # if", "[] self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out = []", "1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4 = grid", "two-level graph class RandomTwoMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d", "torch.cat([X, X_l], dim=1) X_global.append(X) # construct edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1 =", "= 1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n", "edge_attr def grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0,", "= pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:]", "if sample_idx is None: std = self.std + self.eps # n mean =", "* self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level -", "edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up", "= (index + self.m) % self.n grid, grid_all = self.get_grid() edge_index, edge_index_down, edge_index_up", "grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1,", "edge_index.append((i, i + 1)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d)", "1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = 
self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4],", "out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) # out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,)", "torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <=", "if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def forward(self, x):", "index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr", "theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2))", "n_x, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2),", "edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner =", "self.edge_index_down = [] self.edge_index_up = [] self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up", "torch.zeros(n_l, (depth - 1 - l) * 2)], dim=1) # else: # X_l", "= a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]]", "], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0]))", "np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) #", "= torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n, ) for i in range(self.splits): 
pred[sample_idx_list[i]]", "a[x, y] a2 = a[x + 1, y] edge_index.append((i, i + 1)) edge_attr.append((d,", ":self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2]", "edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y],", "np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long)", "a != None: a = a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x) # xs", "theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(self.T, -1)", "def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self): self.mean = self.mean.cpu()", "return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level assert", "dim=0) self.idx_all = idx_all self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out", "real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.attr_features", "= theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1]", "n_x - 1): d = 1 / n_x edge_index.append((i, i + 1)) edge_index.append((i", "index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter", "theta=None): if f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else:", "= 1 
/ n_x edge_index.append((i, i + 1)) edge_index.append((i + 1, i ))", "+ self.eps # batch*n mean = self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): std =", "Y, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T,", "for j in range(self.num): idx = perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx]", "edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead of randomly sample sub-grids, here we downsample sub-grids", "theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d))", "edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def", "= torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a", "+ n_x)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2),", "self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object): def __init__(self,", "np.zeros((n_edges, 3 + self.edge_features * 2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] =", "= False def load_file(self, file_path): self.file_path = file_path self._load_file() def read_field(self, field): x", "= torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None: data =", "1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2 * self.d]", "a2 = a[x, y+1] edge_index.append((i, i + n_x)) edge_attr.append((d, a1, a2, 1 /", 
"sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index", "(x - self.b)/self.a x = x.view(s) return x #loss function with rel/abs Lp", "i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m) for j", "<=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape)", "or (self.perm is None): self.perm = torch.randperm(self.n) index = index0 for l in", "= pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m) for j in range(batch_size2): pred_ij =", "+ 1, y] edge_index.append((i, i + 1)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1", "downsample(data, grid_size, l): data = data.reshape(-1, grid_size, grid_size) data = data[:, ::l, ::l]", "2 * self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0)", ") X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub", "[] self.n_edges_inter = [] edge_index_out = [] edge_index_down_out = [] edge_index_up_out = []", "torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in order to use", "theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X =", "nx = self.s ny = self.s # pred_ij = pred_i[idx : idx +", "range(len(pred)): pred_i = 
pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij", "i)) if a != None: a1 = a[x, y] a2 = a[x, y+1]", "edge_index): n_edges = edge_index.shape[1] edge_attr = np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1))", "multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr, mask_index, num_nodes)", "0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1,", "self.grid_sample_both = self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i =", "l): data = data.reshape(-1, grid_size, grid_size) data = data[:, ::l, ::l] data =", "X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges,", "return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None): if f is None: if theta", "size_average def abs(self, x, y): num_examples = x.size()[0] #Assume uniform mesh h =", "np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta):", "/ 0.01) ** 2))) edge_index.append((i + n_x, i)) edge_attr.append((d, a2, a1, 1 /", "2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2", "self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx] edge_attr = np.zeros((self.n_edges, 2 * self.d +", "= a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr,", "Y_difference = Y_DIFF[PWD_index] n_edges = edge_index.shape[1] 
return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference", "to_float # normalization, pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__()", "test mesh, generate a list of data data = [] index = 0", "self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r,", "= [] self.edge_attr_down = [] self.edge_attr_up = [] if theta is None: for", "= torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr,", "= (high - low)/(mymax - mymin) self.b = -self.a*mymax + high def encode(self,", "mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m", "dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l in range(self.level): edge_index_range[l,", "= theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2 * self.d] =", "cval=0) out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate graph on Torus, with", "- 1)/r) + 1) else: self.s = int(resolution/r) self.r = r self.n =", "n_x, (2 ** l)) if grid == 'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l,", "grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges", "mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2, sigma=1):", "self.n self.idx = np.array(range(self.n)) self.grid_sample = self.grid def 
sample(self): perm = torch.randperm(self.n) self.idx", "None self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.edge_attr = []", "super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.attr_features = attr_features assert len(mesh_size)", "self.l # out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) # out = torch.tensor(out, dtype=torch.float)", "self.resolution, theta_d) data = [] for x in range(self.r): for y in range(self.r):", "(x != n_x - 1): edge_index.append((i, i + 1)) edge_attr.append((1, 0, 0)) edge_index.append((i", "torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index,", "DownsampleGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead", "resolution**2 self.m = m self.T = T self.radius = radius self.edge_features = edge_features", "edge_attr_inner = grid_edge(h_y_l, h_x_l, a) # update index edge_index_inner = edge_index_inner + num_nodes", "return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample", "is_cuda: pred = torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n, ) for i in", "split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx) assert len(pred) == self.r**2 // batch_size2", "Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m", "decode(self, x): s = x.size() x = x.view(s[0], -1) x = (x -", "xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None: edge_attr = f(xy[:, 0:self.d], xy[:,", "= a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = 
torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index,", "edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X,", "X # if (is_high): # X = torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l,", "= index_end if index0 < index_end: idx_all = self.perm[index0: index_end] else: idx_all =", "# if (xj, yj) is a valid node if is_periodic: x_j = x_j", "self.edge_index_12[1,:] + self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) + self.m", "self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\", "edge_attr = np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:,", "s = x.size() x = x.view(s[0], -1) x = self.a*x + self.b x", "torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(self.T,", "n_x) ys = np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs,", "p > 0 self.d = d self.p = p self.reduction = reduction self.size_average", "x = torch.randint(0, self.r, (1,)) y = torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r,", "(self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 *", "= np.tile(y2.reshape(1, n), [n, 1]) Y_diff = Y1 - Y2 return X_diff, Y_diff", "= torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx def get_grid(self): return", "np.exp(-(d / 0.01) ** 2))) edge_index.append((i + n_x, i)) edge_attr.append((d, a2, a1, 1", "x): s = x.size() x = x.view(s[0], -1) x = (x - self.b)/self.a", "X_diff0, 
Y_diff0 = self.pairwise_difference(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 =", "self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index = 0 for l in range(self.level-1): edge_index_down_range[l,", "1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else:", "[] edge_index_out = [] edge_index_down_out = [] edge_index_up_out = [] index = 0", "def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda =", "their parents are NN but they are not NearestNeighbor edge_index_inter = [] for", "self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index = 0 for l in range(self.level-1):", "self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\", "= torch.zeros(self.n, ) if cuda: out = out.cuda() for i in range(len(pred)): pred_i", "n_x, y / n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y !=", "super(LpLoss, self).__init__() #Dimension and Lp-norm type are postive assert d > 0 and", "edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:]", "self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None): if f is", "def downsample(data, grid_size, l): data = data.reshape(-1, grid_size, grid_size) data = data[:, ::l,", "= sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2 = grid grid2[:, 1]", "Y_diff def torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = 
self.pairwise_difference(grid, grid)", "# update index edge_index_inner = edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X", "self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub,", "0:self.d], xy[:, self.d:]) else: theta = theta[self.idx] edge_attr = f(xy[:, 0:self.d], xy[:, self.d:],", "r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m self.edge_index_21 =", "radius_inter): assert len(radius_inner) == self.level assert len(radius_inter) == self.level - 1 self.edge_index =", ":] = edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1])", "theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out =", "self.s ny = self.s # pred_ij = pred_i[idx : idx + nx *", "= self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, scaling", "grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2 = grid", "self.boundary_connectivity2d() if f is None: if theta is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else:", "def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index", "= torch.cat([theta_sub, theta_sample],dim=0) X = 
torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split)", "[] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self): self.idx", "splits:', self.splits ) self.perm = None self.idx = [] self.idx_all = None self.grid_sample", "= sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1])", "** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i", "self.pairwise_difference(grid, grid3) grid4 = grid grid4[:, 0] = grid[:, 0] + 1 grid4[:,", "in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for y in range(n_y):", "= n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None):", "+ n_x, i)) if a != None: a1 = a[x, y] a2 =", "x def decode(self, x, sample_idx=None): if sample_idx is None: std = self.std +", "assert len(radius_inner) == self.level assert len(radius_inter) == self.level - 1 self.edge_index = []", "we concatenate the edge index list and label the range of each level", "X_l], dim=1) X_global.append(X) # construct edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1 = index1", "-1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample],", "grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1", "self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else:", "grid3) grid4 = grid grid4[:, 0] = grid[:, 0] + 1 grid4[:, 1]", "y] a2 = a[x + 1, y] edge_attr.append((x / n_x, y / n_y,", "a2 = a[x, y+1] 
edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X", "self.attr_features = attr_features assert len(mesh_size) == self.d if self.d == 1: self.n =", "edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float)", "def simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y)", "grid_l = xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s,", "sklearn.metrics from torch_geometric.data import Data import torch.nn as nn from scipy.ndimage import gaussian_filter", "sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :] = edge_index[1,", "range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if x==0: nx", "theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference =", "theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1] = theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if", "out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self,", "<= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference", "theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1] edge_attr = np.zeros((n_edges,", "torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) 
edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X =", "self.s = mesh_size[0] assert len(mesh_size) == self.d if self.d == 1: self.n =", "n_y a1 = a[x, y] a2 = a[x, y+1] edge_index.append((i, i + n_x))", "for t in range(self.T): for i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i", "Y_diff2 = self.pairwise_difference(grid, grid2) grid3 = grid grid3[:, :] = grid[:, :] +", "= gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size,", "= X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, )", "a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) #", "# we then compute the interactive neighbors -- their parents are NN but", "torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample", "= 0 for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd", "/ 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + n_x, i))", "torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r): grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1", "/ (self.std + self.eps) return x def decode(self, x, sample_idx=None): if sample_idx is", "self.r = r self.n = resolution**2 self.m = m self.T = T self.radius", "grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD =", "= d self.p = p self.reduction = reduction self.size_average = size_average def abs(self,", "dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', 
edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list,", "],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end if index0 < index_end: idx_all", "1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel() for xx in", "= p self.reduction = reduction self.size_average = size_average def abs(self, x, y): num_examples", "self.n // self.m # number of sub-grid def get_data(self, theta, edge_features=1): data =", "theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1] =", "= grid[:, 0] + 1 grid4[:, 1] = grid[:, 1] - 1 pwd4", "self.s n = self.n boundary1 = np.array(range(0, s)) boundary2 = np.array(range(n - s,", "np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2", "edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in", "= 0 # build connected graph for l in range(depth): h_x_l = n_x", "grid_list = [] theta_list = [] edge_index_list = [] edge_index_list_cuda = [] level", "= theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1,", "self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out", "RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m", "== self.d if self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1],", "X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) 
edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0,", "n_sub = Y_sub.shape[1] if self.m >= n_sub: m = self.m - n_sub perm", "n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference def get_data(self, theta,", "edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary,", "2*self.attr_features)) edge_attr[:, 0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d :", "= radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid):", "assert len(pred) == len(split_idx) assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution))", "+ 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4 =", "= torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph, with split and", "edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1] = theta[self.edge_index_boundary[1]]", "mean return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self):", "self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None):", "= torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate multi-level graph, with split and assemble", "[] self.n_edges_inter = [] def sample(self, new_sample=True, index0=0): self.idx = [] self.grid_sample =", "): super(TorusGridSplitter, self).__init__() self.grid = 
grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) #", "grid[:, 0] + 1 grid4[:, 1] = grid[:, 1] - 1 pwd4 =", "grid grid4[:, 0] = grid[:, 0] + 1 grid4[:, 1] = grid[:, 1]", "= torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd", ") X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split)", "get_data(self, theta): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data = []", "= np.zeros((n_edges, 3 + self.edge_features * 2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0]", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge1d(n_x, a=None): if a", "edge_index.append((i, i2)) edge_index.append((i2, i )) if a != None: a1 = a[x] a2", "y], dtype=torch.long).reshape(1, 2) if params==None: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split)", "idx = self.perm[index: index_end] else: idx = torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx])", "= torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data", "Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return", "* self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out", 
"edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2", "= (x - self.mean) / (self.std + self.eps) return x def decode(self, x,", "theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l", "index_end: idx_all = self.perm[index0: index_end] else: idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all", "else: pred = torch.zeros(self.n, ) for i in range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return", "split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def sampleT(self, theta,", "dim=0).reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split =", "edge_attr.append((0, -1, 0)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index", "= torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug(n_x, n_y, a): a =", "= torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance,", "y * n_x + x if (x != n_x - 1): d =", "theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr =", "1) # X = torch.cat([X, X_l], dim=1) X_global.append(X) # construct edges index1 =", "self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None: edge_attr = f(xy[:, 0:self.d], xy[:, self.d:]) else:", "assert len(pred) == self.num * self.l // batch_size2 out = torch.zeros(self.n, ) if", "- mymin) 
self.b = -self.a*mymax + high def encode(self, x): s = x.size()", "ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0)", "# # generate two-level graph class RandomTwoMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, induced_point):", "data = [] for x in range(self.r): for y in range(self.r): grid_sub =", "build connected graph for l in range(depth): h_x_l = n_x // (2 **", "edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else:", "= [] for x in range(self.r): for y in range(self.r): grid_sub = self.grid[x::self.r,", "pwd = np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2) edge_index = np.vstack(np.where(pwd <= self.radius))", "= perm.reshape(self.num, self.m) for j in range(self.num): idx = perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx]", "self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long),", "= self.n // self.m # number of sub-grid if self.splits * self.m <", "grid_edge(h_y_l, h_x_l, a) # update index edge_index_inner = edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner)", "+ self.eps) return x def decode(self, x, sample_idx=None): if sample_idx is None: std", "generate graph, with split and assemble class RandomGridSplitter(object): def __init__(self, grid, resolution, d=2,", "l in range(self.level): index = index % self.n index_end = (index+self.ms[l]) % self.n", "a=None): if a != None: a = a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x)", "pwd_index = np.argmin(PWD, axis=2) 
edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index = pwd_index[np.where(pwd <=", "a[x + 1] edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x / n_x, a2, a1))", "axis=2) pwd = np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2) edge_index = np.vstack(np.where(pwd <=", "high=1.0): super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a =", "index list and label the range of each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long)", "self).__init__() self.d = len(real_space) self.s = mesh_size[0] assert len(mesh_size) == self.d if self.d", "np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2) edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index =", "RandomGridSplitter(object): def __init__(self, grid, resolution, d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid =", "= torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge1d(n_x, a=None): if a !=", "out[idx] = out[idx] + pred_ij out = out / self.l # out =", "grid(h_y_l, h_x_l) elif grid == 'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a)", "self.grid_sample = self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample =", "theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N,", "grid2 = grid grid2[:, 1] = grid[:, 1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid,", "grid_edge_aug_full(n_x, n_y, r, a): n = n_x * n_y xs = np.linspace(0.0, 1.0,", "sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample))", "self.grid_sample[l].shape[0] index = 0 for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index", "if theta is None: edge_attr 
= self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21", "if resolution%2==1: self.s = int(((resolution - 1)/r) + 1) else: self.s = int(resolution/r)", "self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float), \\", "a1)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index,", "2*self.d+2)) a = theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] = a[edge_index[0]]", "split_idx_i = split_idx[i].reshape(batch_size2, self.m) for j in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx =", "l in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l,", "torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features]", "self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21 =", "if a != None: a = a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x) #", "= x.view(s) return x def decode(self, x): s = x.size() x = x.view(s[0],", "theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with sampling class RandomMeshGenerator(object): def __init__(self,", "l in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d + 2))", "to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda = to_cuda self.to_float = to_float self.file_path", ") self.perm = None self.idx = [] self.idx_all = None self.grid_sample = []", "theta.shape[1] 
theta = theta.reshape(self.resolution, self.resolution, theta_d) data = [] for x in range(self.r):", "X2 Y1 = np.tile(y1.reshape(n, 1), [1, n]) Y2 = np.tile(y2.reshape(1, n), [n, 1])", "y.view(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms) return", "dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None): if theta is", "index_split.shape) return data def sampleT(self, theta, Y, params=None): theta_d = theta.shape[1] theta =", "= self.n boundary1 = np.array(range(0, s)) boundary2 = np.array(range(n - s, n)) boundary3", "= index2.repeat(2, axis = 0).repeat(2, axis = 1) index2 = torch.tensor(index2).reshape(-1) index2 =", "/ n_x, a2, a1)) X = torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1)", "import torch.nn as nn from scipy.ndimage import gaussian_filter ################################################# # # Utilities #", "x.size()[0] #Assume uniform mesh h = 1.0 / (x.size()[1] - 1.0) all_norms =", "1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return", "self.d +1] = theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary", "self.m - n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx]", "self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self, new_sample=True, index0=0):", "edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index", "is_periodic: x_j = x_j % s_l # if (xj, yj) is a valid", 
"out.reshape(self.T,self.n) def downsample(data, grid_size, l): data = data.reshape(-1, grid_size, grid_size) data = data[:,", "return torch.sum(all_norms) return all_norms def rel(self, x, y): num_examples = x.size()[0] diff_norms =", "theta is None: edge_attr = f(xy[:, 0:self.d], xy[:, self.d:]) else: theta = theta[self.idx]", "l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all", "split and assemble class RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__()", "grid_edge1d(n_x, a=None): if a != None: a = a.reshape(n_x) xs = np.linspace(0.0, 1.0,", "size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm type are postive assert d >", "% self.m == 0 self.num = self.n // self.m # number of sub-grid", "to use graph network's data structure, # the edge index shall be stored", "0] = grid[:, 0] + 1 grid4[:, 1] = grid[:, 1] - 1", "Y = Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0, self.r, (1,)) y = torch.randint(0,", "y = torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub =", "self.l // batch_size2 out = torch.zeros(self.n, ) if cuda: out = out.cuda() for", "np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None, theta=None):", "n_x * n_y xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y)", "-self.a*mymax + high def encode(self, x): s = x.size() x = x.view(s[0], -1)", "edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test',", 
"dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0)", "= torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx],", "= [] edge_attr_global = [] X_global = [] num_nodes = 0 # build", "PWD = np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <= r))", "def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size = len(boundary) vertice1 = np.array(range(self.n)) vertice1", "if f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else:", "np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1],", "def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers) - 1", "pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x,", "self.old_mat = False def load_file(self, file_path): self.file_path = file_path self._load_file() def read_field(self, field):", "= Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:,", "torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a", "self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1)", "cval=0) # out = 
torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate multi-level graph, with", "data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data", "- self.mean) / (self.std + self.eps) return x def decode(self, x, sample_idx=None): x", "self.grid self.grid_sample_i = self.grid self.grid_sample_both = self.grid def sample(self): perm = torch.randperm(self.n) self.idx", "pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges", "dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x *", "import sklearn.metrics from torch_geometric.data import Data import torch.nn as nn from scipy.ndimage import", ":4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:, 4 +", "= theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None: edge_attr = f(xy[:,0:self.d],", "(X, edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False): grid_list =", "edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ],", "= grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2 =", "n = n_x * n_y xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0,", "= len(real_space) self.ms = sample_sizes self.m = sample_sizes[0] self.level = level assert 
len(sample_sizes)", "= [] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self):", "sample_idx=None): if sample_idx is None: std = self.std + self.eps # n mean", "X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split", "j in range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if", "node if is_periodic: x_j = x_j % s_l if (x_j in range(s_l)): #", "= np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary", "= grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]] edge_attr[:, 3] = theta[edge_index[1]] return torch.tensor(edge_attr,", "normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def forward(self, x): for", "self.d)) edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d] =", "range of each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range", "* self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up,", "n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:,", "*= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx = [] self.idx_all", "edge_attr_up_out # generate graph, with split and assemble class RandomGridSplitter(object): def __init__(self, grid,", "self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) 
split_idx = torch.tensor([x, y],", "(-1,1): x_j = x_i + x if is_periodic: x_j = x_j % s_l", "np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] =", "= torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out,", "torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self,", "theta_all): # give a test mesh, generate a list of data data =", "= torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1)", "PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long),", "self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float))", "y): num_examples = x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms =", "self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def forward(self, x): for _,", "sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both =", "self.m - grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split", "== self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i =", "= to_torch 
self.to_cuda = to_cuda self.to_float = to_float self.file_path = file_path self.data =", "None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx]", "self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner)", "d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm type are postive assert", "feedforward neural network class DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__()", "= self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22,", "n_sub: m = self.m - n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample", "len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i", "np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return", "i)) edge_attr.append((0, -1, 0)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1)", "from torch_geometric.data import Data import torch.nn as nn from scipy.ndimage import gaussian_filter #################################################", "= l(x) if j != self.n_layers - 1: x = torch.sin(x) return x", "graphs on square domain class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d", "i)) edge_attr.append((-1, 0, 0)) if (y != n_y - 1): edge_index.append((i, i +", "i in 
range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred # generate graph, with split", "torch.randperm(self.n) perm = perm.reshape(self.num, self.m) for j in range(self.num): idx = perm[j,:].reshape(-1,) grid_sample", "resolution, d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid = grid self.resolution = resolution", "* self.d)) edge_attr_12[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d]", "T*batch*n mean = self.mean[:,sample_idx] # x is in shape of batch*n or T*batch*n", "gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred, split_idx,", "stride=1): boundary = self.boundary[::stride] boundary_size = len(boundary) vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1,", "self.n: self.m = self.n self.idx = np.array(range(self.n)) self.grid_sample = self.grid def sample(self): perm", "<= self.radius)) pwd_index = pwd_index[np.where(pwd <= self.radius)] PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd", "self.level - 1 self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner", "1.0 / (x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) if", "in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for i1 in range(n):", "range(n): x2 = grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2 =", "r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index", "for l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level", 
"= self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 *", "axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD, axis=2)", "edge_index.append((i + 1, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d)", "\\ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None): if theta is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges,", "else: # X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X, X_l], dim=1)", "[] self.edge_index_down = [] self.edge_index_up = [] self.edge_attr = [] self.edge_attr_down = []", "0)) edge_index.append((i + n_x, i)) edge_attr.append((0, -1, 0)) X = torch.tensor(grid, dtype=torch.float) #", "mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s = mesh_size[0] assert len(mesh_size) == self.d", "grid_edge(h_y_l, h_x_l, a) elif grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l,", "edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float)", "a): a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0,", "edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s =", "self.perm[: index_end]), dim=0) self.idx_all = idx_all self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def", "self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1): edge_attr_down = 
self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up =", "neighbors (NN) if l==1: edge_index_nn = [] for x_i in range(s_l): for x", "(self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 *", "index0 for l in range(self.level): index = index % self.n index_end = (index+self.ms[l])", "if x==0: nx = self.s else: nx = self.s-1 if y==0: ny =", "X_global = [] num_nodes = 0 # build connected graph for l in", "is not None: self.layers.append(out_nonlinearity()) def forward(self, x): for _, l in enumerate(self.layers): x", "= self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = self.attributes(theta=theta_a)", "!= depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different from numpy index2", "= torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long)", "= np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids = [] for", "l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid = grid self.resolution = resolution self.n = resolution**d", "self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for", "return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long) def", "x, eps=0.00001): 
super(UnitGaussianNormalizer, self).__init__() # x could be in shape of ntrain*n or", "to_cuda self.to_float = to_float self.file_path = file_path self.data = None self.old_mat = None", ":] theta_l = theta_l.reshape(N, n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) #", "self.to_float = to_float self.file_path = file_path self.data = None self.old_mat = None self._load_file()", "valid node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn =", "generate two-level graph class RandomTwoMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__()", "super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_sizes self.level = level assert len(sample_sizes)", "= grid grid2[:, 1] = grid[:, 1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2)", "dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0)", "a = theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1) edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:,", "sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges", "= len(real_space) self.s = mesh_size[0] assert len(mesh_size) == self.d if self.d == 1:", "# normalization, pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() #", "None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = 
self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1))", "index = index + self.grid_sample[l].shape[0] index = 0 for l in range(self.level-1): pwd", "in range(self.r): for y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r,", "= h5py.File(self.file_path) self.old_mat = False def load_file(self, file_path): self.file_path = file_path self._load_file() def", "get_data(self, theta, edge_features=1): data = [] for i in range(self.l): perm = torch.randperm(self.n)", "index2 = index2 + num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1)", "distance, X_difference, Y_difference def get_data(self, theta, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution,", "ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def", "layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers", "+ n_x, i)) edge_attr.append((0, -1, 0)) X = torch.tensor(grid, dtype=torch.float) # Exact =", "else: nx = self.s-1 if y==0: ny = self.s else: ny = self.s-1", "+ n_x)) edge_index.append((i + n_x, i)) if a != None: a1 = a[x,", "def get_boundary(self): s = self.s n = self.n boundary1 = np.array(range(0, s)) boundary2", "self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out = [] for grid", "self.level = level assert len(sample_sizes) == level assert len(mesh_size) == self.d if self.d", "sample(self, theta, Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y =", "dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr =", 
"__init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d = len(real_space) self.ms = sample_sizes", "simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) #", "edge_attr.append((1, 0, 0)) edge_index.append((i + 1, i)) edge_attr.append((-1, 0, 0)) if (y !=", "= a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx =", "dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l,", "= s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l) grid_l = xs grid_l =", "layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers", "to_cuda def set_torch(self, to_torch): self.to_torch = to_torch def set_float(self, to_float): self.to_float = to_float", "edge index list and label the range of each level edge_index_range = torch.zeros((self.level,2),", "abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter)", "= m self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution)", "network's data structure, # the edge index shall be stored as tensor instead", "edge_index.shape[1] edge_attr = np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] =", "self.s # pred_ij = pred_i[idx : idx + nx * ny] out[t, x::self.r,", "pred, split_idx, batch_size2, sigma=1, cuda=False): assert len(pred) == len(split_idx) assert len(pred) == self.num", "+1] = theta[self.edge_index[1]] else: xy 
= self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None: edge_attr =", "= theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx =", "= theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d + 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float),", "grid2): n = grid1.shape[0] x1 = grid1[:,0] y1 = grid1[:,1] x2 = grid2[:,0]", "edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda", "= self.s # pred_ij = pred_i[idx : idx + nx * ny] out[t,", "i2 in range(n): x2 = grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1]", "= self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i,", "= Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges =", "xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s = self.s n =", "= f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with sampling", "level = int(np.log2(s) - 1) print(level) for l in range(1, level+1): r_l =", "self.a = (high - low)/(mymax - mymin) self.b = -self.a*mymax + high def", "* self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d +", "2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] =", "generate graph on Torus, with split and assemble class 
TorusGridSplitter(object): def __init__(self, grid,", "(xj, yj) is a valid node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn =", "def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level assert len(radius_inter) == self.level -", "self.r = r self.n = resolution**2 self.m = m self.radius = radius self.edge_features", "mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T def ball_connectivity(self,", "np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + 1,", "def multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False): grid_list = [] theta_list = [] edge_index_list", "= (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(all_norms) else:", "2 * self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d +", "x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1)", "= gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate", "in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert", "is a list (batches) of list (time seq) assert len(pred) == len(split_idx) assert", "torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def multi_grid(depth,", "= grid1[:,1] x2 = grid2[:,0] y2 = grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1,", "params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y): theta_d", "edge_attr = f(xy[:, 
0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # #", "= Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0)", "/ n_x, y / n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y", "[] for i1 in range(n): x1 = grid[i1] for i2 in range(n): x2", "level assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid", "we then compute the interactive neighbors -- their parents are NN but they", "sigma=1, cuda=False): assert len(pred) == len(split_idx) assert len(pred) == self.num * self.l //", "for _, l in enumerate(self.layers): x = l(x) return x class DenseNet_sin(torch.nn.Module): def", "self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2)", "edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1] edge_attr = np.zeros((n_edges, 4)) edge_attr[:,", "__init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s = mesh_size[0] assert len(mesh_size)", "are NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter", "x): s = x.size() x = x.view(s[0], -1) x = self.a*x + self.b", "return x #loss function with rel/abs Lp loss class LpLoss(object): def __init__(self, d=2,", "import torch import numpy as np import scipy.io import h5py import sklearn.metrics from", "= n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index = 0 for l", "edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4 + self.edge_features", "if (x != n_x - 1): d = 1 / n_x edge_index.append((i, i", "= perm[:self.m] self.idx_i = perm[self.m: 
self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx]", "= torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:,", "= torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size, l): data = data.reshape(-1, grid_size,", "split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train',", "n_y, a): a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys =", "self.layers.append(out_nonlinearity()) def forward(self, x): for _, l in enumerate(self.layers): x = l(x) return", "torch import numpy as np import scipy.io import h5py import sklearn.metrics from torch_geometric.data", "= [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self, new_sample=True, index0=0): self.idx", "2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2)", "__init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1) mymax = torch.max(x,", "a1 = a[i1] a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1 /", "ys = np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) # ys = np.array(range(n_y))", "axis = 1) index2 = torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes index2 =", "f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s = self.s n", "1): d = 1 / n_x edge_index.append((i, i + 1)) edge_index.append((i + 1,", "n_y - 1): d = 1 / n_y edge_index.append((i, i + n_x)) edge_index.append((i", "__init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x could be in shape 
of ntrain*n", "resolution**d self.d = d self.m = m self.l = l self.radius = radius", "= np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:,", "torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter =", "index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges,", "range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] =", "enumerate(self.layers): x = l(x) return x class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None,", "+ self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,", "self.m # number of sub-grid if self.splits * self.m < self.n: self.splits =", "np.exp(-(d / 0.01) ** 2))) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact,", "= Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if self.m", "= a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape,", "= resolution**2 self.m = m self.T = T self.radius = radius self.edge_features =", "= self.s else: ny = self.s-1 else: nx = self.s ny = self.s", "r self.n = resolution**2 self.m = m self.radius = radius self.edge_features = edge_features", "self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x,", "1 print('n:',self.n,' m:',self.m, ' number of splits:', self.splits ) self.perm = 
None self.idx", "from scipy.ndimage import gaussian_filter ################################################# # # Utilities # ################################################# device = torch.device('cuda'", "self.grid[self.idx] return self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r, is_forward=False): pwd", "= theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out", "(h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(all_norms) else: return", "dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level", "to_float self.file_path = file_path self.data = None self.old_mat = None self._load_file() def _load_file(self):", "i2)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d", "a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x) # xs = np.array(range(n_x)) # ys =", "self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:,", "resolution, r, m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution, resolution,2)", "* n_y xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) grid", "scipy.io import h5py import sklearn.metrics from torch_geometric.data import Data import torch.nn as nn", "all_norms def rel(self, x, y): num_examples = x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1),", "label the 
range of each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2),", "len(pred[0]) == self.T assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution))", "% s_l if (x_j in range(s_l)): # if (xi, yi), (xj, yj) not", "3 * self.d)) edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 *", "ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level assert len(radius_inter) == self.level - 1", "dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter,", "torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = self.pairwise_difference(grid, grid) grid1 =", "edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False): grid_list = []", "edge_features == 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr = np.zeros((n_edges, 2*self.d+2)) a", "edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params)", "0) self.std = torch.std(x, 0) self.eps = eps def encode(self, x): x =", "x = torch.sin(x) return x # generate graphs on square domain class SquareMeshGenerator(object):", "# xs = np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index = [] edge_attr =", "a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)),", "> self.n: self.m = self.n self.idx = np.array(range(self.n)) self.idx_i = self.idx self.idx_both =", "(x + 2) % n_x edge_index.append((i, i2)) edge_index.append((i2, i )) if a !=", "4)) edge_attr[:, 0:2] = 
grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]] edge_attr[:, 3] =", "theta is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 =", "graph class RandomTwoMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d =", "self.m # number of sub-grid def get_data(self, theta, edge_features=1): data = [] for", "data.reshape(-1, (grid_size // l) ** 2) return data def simple_grid(n_x, n_y): xs =", "X, torch.zeros(n_l, (depth - 1 - l) * 2)], dim=1) # else: #", "torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample =", "if (xj, yj) is a valid node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn", "None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx] edge_attr = np.zeros((self.n_edges, 2", "theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:,", "** l) n_l = h_x_l * h_y_l a = downsample(params, n_x, (2 **", "np.exp(-(d / 0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1", "None: self.layers.append(out_nonlinearity()) def forward(self, x): for _, l in enumerate(self.layers): x = l(x)", "edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self,", "self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf", 
"else: idx, idx_all = self.sample(new_sample=False, index0=index) index = (index + self.m) % self.n", "theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12,", "* self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d + 1] = theta[self.edge_index_21[1]] edge_attr_22", "[1, n]) Y2 = np.tile(y2.reshape(1, n), [n, 1]) Y_diff = Y1 - Y2", "== len(split_idx) assert len(pred) == self.num * self.l // batch_size2 out = torch.zeros(self.n,", "for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j", "= f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float)", "theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s = self.s n = self.n boundary1", "[] edge_index_list_cuda = [] level = int(np.log2(s) - 1) print(level) for l in", "torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] edge_index", "range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index,", "x in (-1,1): x_j = x_i + x if is_periodic: x_j = x_j", "edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both] edge_attr", "-1)) if theta is None: edge_attr = f(xy[:, 0:self.d], xy[:, self.d:]) else: theta", "x if (x != n_x - 1): edge_index.append((i, i + 1)) edge_attr.append((1, 0,", "interactive neighbors -- 
their parents are NN but they are not NearestNeighbor edge_index_inter", ") for i in range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred # generate graph,", "edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape)", "y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X = torch.tensor(grid, dtype=torch.float) # Exact", "- x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda())", "self.s else: ny = self.s-1 else: nx = self.s ny = self.s #", "dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf)", "edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in order to use graph network's data", "is in shape of batch*n or T*batch*n x = (x * std) +", "torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner,", "self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x, y", "edge_index, edge_attr def grid_edge_aug_full(n_x, n_y, r, a): n = n_x * n_y xs", "ys)]).T edge_index = [] edge_attr = [] for i1 in range(n): x1 =", "field): x = self.data[field] if not self.old_mat: x = x[()] x = np.transpose(x,", "resolution self.n = resolution**d self.d = d self.m = m self.l = l", "s, N, is_periodic=False): grid_list = [] theta_list = [] edge_index_list = [] edge_index_list_cuda", "f(xy[:, 0:self.d], xy[:, self.d:]) else: theta = theta[self.idx] edge_attr = f(xy[:, 
0:self.d], xy[:,", "0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global,", "grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys", "dtype=torch.long) def torus1d_connectivity(self, r): grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 =", "grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution", "X, edge_index, edge_attr def grid_edge(n_x, n_y, a=None): if a != None: a =", "% n_x edge_index.append((i, i2)) edge_index.append((i2, i )) if a != None: a1 =", "for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l]))", "torch.mean(x) self.std = torch.std(x) self.eps = eps def encode(self, x): x = (x", "np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4],", "import Data import torch.nn as nn from scipy.ndimage import gaussian_filter ################################################# # #", "def attributes_boundary(self, f=None, theta=None): # if self.edge_index_boundary == None: # self.boundary_connectivity2d() if f", "self.std = self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(GaussianNormalizer,", "range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) 
self.edge_attr_up.append(torch.tensor(edge_attr_up))", "self.b)/self.a x = x.view(s) return x #loss function with rel/abs Lp loss class", "= np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float)", "self.grid_sample = [] self.grid_sample_all = None self.edge_index = [] self.edge_index_down = [] self.edge_index_up", "i in range(self.splits): if i==0: idx, idx_all = self.sample(new_sample=True, index0=index) else: idx, idx_all", "== self.splits if is_cuda: pred = torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n, )", "!= None: a1 = a[x] a2 = a[x + 1] edge_attr.append((x / n_x,", "+ self.mean return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def", "out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1", "= mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids", "= np.array(range(2 * s - 1, n, s)) self.boundary = np.concatenate([boundary1, boundary2, boundary3,", "self.mean[:,sample_idx] # x is in shape of batch*n or T*batch*n x = (x", "sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred,", "def grid_edge_aug_full(n_x, n_y, r, a): n = n_x * n_y xs = np.linspace(0.0,", "torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None): if f is None: if theta is", "assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)):", "= torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X", ":] 
+ 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4", "i + 1)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) **", "mean = self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape): std = self.std[sample_idx] + self.eps", "= self.idx self.grid_sample = self.grid self.grid_sample_i = self.grid self.grid_sample_both = self.grid def sample(self):", "sample_sizes[0] self.level = level assert len(sample_sizes) == level assert len(mesh_size) == self.d if", "n = grid1.shape[0] x1 = grid1[:,0] y1 = grid1[:,1] x2 = grid2[:,0] y2", "torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1,", "= [] def sample(self): self.idx = [] self.grid_sample = [] perm = torch.randperm(self.n)", "edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a, theta_all): # give a test mesh, generate", "= a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y)", "torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge1d(n_x, a=None): if a != None:", "is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta =", "edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 *", "2*self.d+1] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data),", "[] edge_index_up_out = [] index = 0 for l in range(self.level): pwd =", "= self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split =", "= self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = 
self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1)", "= self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:,", "params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data = [] for", "// r_l n_l = s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l) grid_l =", "edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 *", "for x_i in range(s_l): for x in range(-3,4): x_j = x_i + x", "in np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.idx_i", "= torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split)", "3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx", "1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3 = grid", "= torch.cat([X, X_l], dim=1) X_global.append(X) # construct edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1", "axis=2) pwd = np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1]", "real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.m_i", "if torch.cuda.is_available() else 'cpu') # reading data class MatReader(object): def __init__(self, file_path, to_torch=True,", "return x def decode(self, x): s = x.size() x = x.view(s[0], -1) x", "!= self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity 
is not None:", "= perm[self.m: self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i]", "Y1 = np.tile(y1.reshape(n, 1), [1, n]) Y2 = np.tile(y2.reshape(1, n), [n, 1]) Y_diff", "== 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr = np.zeros((n_edges, 2*self.d+2)) a =", "3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1,", "+ x if is_periodic: x_j = x_j % s_l # if (xj, yj)", "[] self.edge_attr_down = [] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = []", "range(self.l): perm = torch.randperm(self.n) perm = perm.reshape(self.num, self.m) for j in range(self.num): idx", "def set_cuda(self, to_cuda): self.to_cuda = to_cuda def set_torch(self, to_torch): self.to_torch = to_torch def", "= torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all = idx_all self.grid_sample_all = self.grid[self.idx_all] return self.idx,", "# reading data class MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__()", "-1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample],", "for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) +", "pwd_index[np.where(pwd <= self.radius)] PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance", "self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.splits = self.n // self.m #", "x = (x * std) + mean return x def cuda(self): self.mean =", "1)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d", "= theta[self.idx] edge_attr = 
f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float)", "in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:, 0:2 *", "# number of sub-grid if self.splits * self.m < self.n: self.splits = self.splits", "[] level = int(np.log2(s) - 1) print(level) for l in range(1, level+1): r_l", "self.eps) return x def decode(self, x, sample_idx=None): x = (x * (self.std +", "= torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample],", "in enumerate(self.layers): x = l(x) if j != self.n_layers - 1: x =", "x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1)", "= perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return", "[] perm = torch.randperm(self.n) index = 0 for l in range(self.level): self.idx.append(perm[index: index+self.m[l]])", "= m self.l = l self.radius = radius assert self.n % self.m ==", "* ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float) return", "1), [1, n]) Y2 = np.tile(y2.reshape(1, n), [n, 1]) Y_diff = Y1 -", "self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d + 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr,", "s)) boundary4 = np.array(range(2 * s - 1, n, s)) self.boundary = np.concatenate([boundary1,", "def __init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m =", "level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = 
torch.zeros((self.level-1,2), dtype=torch.long)", "= theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd <= self.radius))", "the edge index shall be stored as tensor instead of list # we", "if index < index_end: idx = self.perm[index: index_end] else: idx = torch.cat((self.perm[index: ],self.perm[:", "print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def sampleT(self, theta, Y, params=None):", "// l) ** 2) return data def simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0,", "num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X # if (is_high): # X = torch.cat([torch.zeros(n_l,", "for y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm", "torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug_full(n_x,", "index1 + num_nodes num_nodes += n_l # #construct inter-graph edge if l !=", "edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1)", "in range(1, level+1): r_l = 2 ** (l - 1) s_l = s", "s = x.size() x = x.view(s[0], -1) x = (x - self.b)/self.a x", "len(sample_idx[0].shape): std = self.std[sample_idx] + self.eps # batch*n mean = self.mean[sample_idx] if len(self.mean.shape)", "range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx] = out[idx] + pred_ij out", "index = index0 for l in range(self.level): index = index % self.n index_end", "range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) 
self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1): edge_attr_down", "split and assemble class RandomGridSplitter(object): def __init__(self, grid, resolution, d=2, m=200, l=1, radius=0.25):", "self.idx, self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both,", "edge_attr_12[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]]", "l self.radius = radius assert self.n % self.m == 0 self.num = self.n", "0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:,", "2 * self.d + 2*self.attr_features)) edge_attr[:, 0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:,", "s_l # if (xj, yj) is a valid node if (x_j in range(s_l)):", "torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reading data class MatReader(object): def __init__(self, file_path,", "np.meshgrid(*grids)]).T self.splits = self.n // self.m # number of sub-grid if self.splits *", "sample_idx=None): x = (x * (self.std + self.eps)) + self.mean return x def", "return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s = self.s n = self.n boundary1 =", "self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx = [] self.idx_all = None", "edge_index[1, :] = edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long))", "print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] 
print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index,", "self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long),", "idx = torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end if index0", "a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1)", "l==1: edge_index_nn = [] for x_i in range(s_l): for x in (-1,1): x_j", "s_l = s // r_l n_l = s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0,", "np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2", "self.resolution) def pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1 = grid1[:,0] y1 = grid1[:,1]", "(batches) of list (time seq) assert len(pred) == len(split_idx) assert len(pred[0]) == self.T", "to_torch self.to_cuda = to_cuda self.to_float = to_float self.file_path = file_path self.data = None", "def sample(self): self.idx = [] self.grid_sample = [] perm = torch.randperm(self.n) index =", "h_x_l = n_x // (2 ** l) h_y_l = n_y // (2 **", "self.m = m self.T = T self.radius = radius self.edge_features = edge_features self.index", "dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def multi_grid(depth, n_x,", "np.linspace(0.0, 1.0, n_x) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index =", "self.reduction = reduction self.size_average = size_average def abs(self, x, y): num_examples = x.size()[0]", "self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x", "self.grid def 
sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i]", "if self.resolution%2==1: if x==0: nx = self.s else: nx = self.s-1 if y==0:", "= file_path self._load_file() def read_field(self, field): x = self.data[field] if not self.old_mat: x", "= self.std[:,sample_idx]+ self.eps # T*batch*n mean = self.mean[:,sample_idx] # x is in shape", "= torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1]", "n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index = 0 for", "assert self.n % self.m == 0 self.num = self.n // self.m # number", "grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] return", "is None: if theta is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary,", "grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub", "= sample_sizes self.m = sample_sizes[0] self.level = level assert len(sample_sizes) == level assert", "= scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data = h5py.File(self.file_path) self.old_mat = False def", "2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:, 3 +", "*= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m > self.n:", "a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) **", "assert len(sample_sizes) == level assert len(mesh_size) == self.d if self.d == 1: self.n", "RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1) 
mymax", "= sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges =", "torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l, (depth - 1 - l) * 2)],", "else: return torch.sum(all_norms) return all_norms def rel(self, x, y): num_examples = x.size()[0] diff_norms", "y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub =", "torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x, y): return self.rel(x, y)", "real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids = [] for j in", "dtype=torch.float) return X, edge_index, edge_attr def grid_edge1d(n_x, a=None): if a != None: a", "h_y_l//2) # torch.repeat is different from numpy index2 = index2.repeat(2, axis = 0).repeat(2,", "torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph class RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size, level,", "self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d + self.attr_features: 2 * self.d", "radius_inner, radius_inter): assert len(radius_inner) == self.level assert len(radius_inter) == self.level - 1 self.edge_index", "= self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None): if f is None:", "n_edge_index n_edge_index = 0 for l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l,", "split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y): theta_d", "/ n_y a1 = a[x, y] a2 = a[x, y+1] edge_index.append((i, i +", "0] = n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index 
edge_index_up_range[l,", "(x_j in range(s_l)): # if (xi, yi), (xj, yj) not NearestNeighbor if abs(x)>=2:", "to_torch def set_float(self, to_float): self.to_float = to_float # normalization, pointwise gaussian class UnitGaussianNormalizer(object):", "range(depth): h_x_l = n_x // (2 ** l) h_y_l = n_y // (2", "dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1)", "= sample_size self.attr_features = attr_features assert len(mesh_size) == self.d if self.d == 1:", "# we concatenate the edge index list and label the range of each", "self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, scaling by", "edge_attr.append((-1, 0, 0)) if (y != n_y - 1): edge_index.append((i, i + n_x))", "-1, -1)) if self.to_float: x = x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x) if", "= [] self.grid_sample = [] perm = torch.randperm(self.n) index = 0 for l", "self.d)) edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d] =", "self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2", "normalization, pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x", "= len(boundary) vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n)", "= grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, )", "edge_attr_12[:, 2 * self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 *", "Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) edge_index, 
n_edges,", "print(level) for l in range(1, level+1): r_l = 2 ** (l - 1)", "edge_index_list_cuda = [] level = int(np.log2(s) - 1) print(level) for l in range(1,", "edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a, theta_all): # give a test mesh,", "f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta", "index1 = torch.tensor(range(n_l), dtype=torch.long) index1 = index1 + num_nodes num_nodes += n_l #", "* self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2", "- y.view(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms)", "self.s # pred_ij = pred_i[idx : idx + nx * ny] out[x::self.r, y::self.r]", "self.eps)) + self.mean return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda()", "split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None: data = Data(x=X, y=Y_split, edge_index=edge_index,", "= self.a*x + self.b x = x.view(s) return x def decode(self, x): s", "# if (xi, yi), (xj, yj) not NearestNeighbor if abs(x)>=2: # if their", "stored as tensor instead of list # we concatenate the edge index list", "__init__(self, grid, resolution, d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid = grid self.resolution", "** 2), np.exp(-(d / 0.01) ** 2))) X = torch.tensor(grid, dtype=torch.float) # Exact", "= resolution if resolution%2==1: self.s = int(((resolution - 1)/r) + 1) else: self.s", "return torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12,", "self.d +1] = theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is 
None: edge_attr", "in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up", "range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij", "1) if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def", "self.eps # batch*n mean = self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): std = self.std[:,sample_idx]+", "True except: self.data = h5py.File(self.file_path) self.old_mat = False def load_file(self, file_path): self.file_path =", "= self.splits + 1 print('n:',self.n,' m:',self.m, ' number of splits:', self.splits ) self.perm", "self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i,", "= torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create multi_grid with", "grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split", "index % self.n index_end = (index+self.ms[l]) % self.n if index < index_end: idx", "class DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers)", "self.d + 2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d))", "np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1]", "distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr =", "0.1) ** 2), np.exp(-(d / 
0.01) ** 2))) edge_index.append((i + 1, i)) edge_attr.append((d,", "= self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None: edge_attr = f(xy[:, 0:self.d], xy[:, self.d:])", "edge_index_nn = [] for x_i in range(s_l): for x in (-1,1): x_j =", "* self.d + 1] = theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:,", "torch.sin(x) return x # generate graphs on square domain class SquareMeshGenerator(object): def __init__(self,", "split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return", "self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms def rel(self, x, y): num_examples", "ys = np.array(range(n_y)) edge_index = [] edge_attr = [] for x in range(n_x):", "+ mean return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def", "edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]]", "j in range(self.num): idx = perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] X", "= out[idx] + pred_ij out = out / self.l # out = gaussian_filter(out,", "np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index =", "# ys = np.array(range(n_y)) edge_index = [] edge_attr = [] for x in", "in range(n): x1 = grid[i1] for i2 in range(n): x2 = grid[i2] d", "sigma=sigma, mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate graph on", "(x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) if self.reduction: if", "= X_DIFF[PWD_index] Y_difference = 
Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges, distance,", "= np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = []", "construct the nearest neighbors (NN) if l==1: edge_index_nn = [] for x_i in", "edge_features=1): data = [] for i in range(self.l): perm = torch.randperm(self.n) perm =", "n_edges, distance, X_difference, Y_difference def get_data(self, theta, params=None): theta_d = theta.shape[1] theta =", "edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self, out_list,", "out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out", "# generate graph, with split and assemble class RandomGridSplitter(object): def __init__(self, grid, resolution,", "y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if self.m >= n_sub:", "0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:,", "= self.std[sample_idx] + self.eps # batch*n mean = self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape):", "0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:,", "self.d = len(real_space) self.ms = sample_sizes self.m = sample_sizes[0] self.level = level assert", "super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s = mesh_size[0] assert len(mesh_size) == self.d if", "dtype=torch.float) return out.reshape(-1,) # generate graph on Torus, with split and assemble class", 
"resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1: self.s =", "sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF =", "def grid_edge1d(n_x, a=None): if a != None: a = a.reshape(n_x) xs = np.linspace(0.0,", "torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph class", "r)) self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]]", "dtype=torch.float) # generate multi-level graph class RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size, level, sample_sizes):", "dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X =", "grid grid3[:, :] = grid[:, :] + 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3,", "a != None: a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys", "theta=None): # if self.edge_index_boundary == None: # self.boundary_connectivity2d() if f is None: if", "= self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1]", "self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers", "boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size = len(boundary) vertice1 =", "* self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1] = theta[self.edge_index[1]] else: xy", "grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, 
y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m =", "self.n_layers = len(layers) - 1 assert self.n_layers >= 1 self.layers = nn.ModuleList() for", "perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split =", "np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd", "self.mean = torch.mean(x) self.std = torch.std(x) self.eps = eps def encode(self, x): x", "= distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr", "edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] =", "= np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2,", "edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in order to use graph network's data structure,", "self.s-1 if y==0: ny = self.s else: ny = self.s-1 else: nx =", "d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid = grid self.resolution = resolution self.n", "= n_x // (2 ** l) h_y_l = n_y // (2 ** l)", "if abs(x)>=2: # if their parents are NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1:", "batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T): for i in range(len(pred)):", "torch.max(x, 0)[0].view(-1) self.a = (high - low)/(mymax - mymin) self.b = -self.a*mymax +", "self.n_edges_inner = [] self.n_edges_inter = [] def sample(self): self.idx = [] self.grid_sample =", "= grid[i1] for i2 in range(n): x2 = grid[i2] d = np.linalg.norm(x1-x2) if(d<=r):", "torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a = (high - 
low)/(mymax - mymin)", "out_list, sample_idx_list, is_cuda=False): assert len(out_list) == self.splits if is_cuda: pred = torch.zeros(self.n, ).cuda()", "= out.cuda() for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2,", "len(sample_sizes) == level assert len(mesh_size) == self.d if self.d == 1: self.n =", "- 1): d = 1 / n_x a1 = a[x, y] a2 =", "torch.randperm(self.n) index = index0 for l in range(self.level): index = index % self.n", "= torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T): for i in range(len(pred)): pred_i =", "= torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split],", "1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter)", "= self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21,", "= self.sample(new_sample=True, index0=index) else: idx, idx_all = self.sample(new_sample=False, index0=index) index = (index +", "y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if self.m >= n_sub: m = self.m -", "def get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1] edge_attr = np.zeros((n_edges, 4)) edge_attr[:, 0:2]", "/ 0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 *", "= n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index return", "= np.tile(x1.reshape(n, 1), [1, n]) X2 = np.tile(x2.reshape(1, n), [n, 1]) X_diff =", "(is_high): # X = torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l, (depth - 1", "# self.theta = 
theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution = resolution", "= a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x) # xs = np.array(range(n_x)) # ys", "load_file(self, file_path): self.file_path = file_path self._load_file() def read_field(self, field): x = self.data[field] if", "self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n =", "shape of ntrain*n or ntrain*T*n or ntrain*n*T self.mean = torch.mean(x, 0) self.std =", "np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2", "# generate graphs with sampling class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, attr_features=1):", "torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n, ) for i in range(self.splits): pred[sample_idx_list[i]] =", "dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape)", "X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:,", "index0=index) else: idx, idx_all = self.sample(new_sample=False, index0=index) index = (index + self.m) %", "len(out_list) == self.splits if is_cuda: pred = torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n,", "elif grid == 'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif grid", "2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i +", "as tensor instead of list # we concatenate the edge index list and", "1, y] edge_index.append((i, i + 1)) edge_attr.append((d, 
a1, a2, 1 / np.sqrt(np.abs(a1 *", "range(self.level): index = index % self.n index_end = (index+self.ms[l]) % self.n if index", "d > 0 and p > 0 self.d = d self.p = p", "d self.p = p self.reduction = reduction self.size_average = size_average def abs(self, x,", "3 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr,", "dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end if index0 < index_end: idx_all = self.perm[index0:", "Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample],", "but they are not NearestNeighbor edge_index_inter = [] for x_i in range(s_l): for", "X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif grid == 'grid_edge': X, edge_index_inner, edge_attr_inner", "= [] self.edge_attr_up = [] if theta is None: for l in range(self.level):", "print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r): grid = self.grid_sample", "* self.m < self.n: self.splits = self.splits + 1 print('n:',self.n,' m:',self.m, ' number", "r self.n = resolution**2 self.m = m self.T = T self.radius = radius", "induced_point assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid", "a test mesh, generate a list of data data = [] index =", "perm = torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n,", "edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2),", ">= 1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j", "torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with sampling class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size,", 
"self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both", "= self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:,", "m:',self.m, ' number of splits:', self.splits ) self.perm = None self.idx = []", "= to_torch def set_float(self, to_float): self.to_float = to_float # normalization, pointwise gaussian class", "return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with sampling class RandomMeshGenerator(object): def __init__(self, real_space,", "1) else: self.s = int(resolution/r) self.r = r self.n = resolution**2 self.m =", "sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4 = grid grid4[:, 0] =", "* n_x + x if (x != n_x - 1): edge_index.append((i, i +", "self.theta = theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution = resolution if", "= sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges =", "+ 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l],", "+ self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d + self.attr_features: 2 *", "a1, a2)) edge_attr.append((x / n_x, a2, a1)) X = torch.tensor(xs, dtype=torch.float) # Exact", "self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) 
self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] edge_index_out", "edge_index = [] edge_attr = [] for y in range(n_y): for x in", "dtype=torch.float) return X, edge_index, edge_attr def grid_edge(n_x, n_y, a=None): if a != None:", "self.attr_features: 2 * self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges,", "- 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l],", "= self.s n = self.n boundary1 = np.array(range(0, s)) boundary2 = np.array(range(n -", "ny = self.s-1 else: nx = self.s ny = self.s # pred_ij =", "theta is None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]],", "edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]]", "self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index =", "a valid node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn", "a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data =", "- 1 assert self.n_layers >= 1 self.layers = nn.ModuleList() for j in range(self.n_layers):", "for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for y", "numpy index2 = index2.repeat(2, axis = 0).repeat(2, axis = 1) index2 = torch.tensor(index2).reshape(-1)", "dtype=torch.float)) for l in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d", "theta_d) Y = Y.reshape(self.resolution, 
self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub =", "self.mean) / (self.std + self.eps) return x def decode(self, x, sample_idx=None): x =", "self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x): for j, l in enumerate(self.layers): x = l(x)", "= np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape(", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug(n_x, n_y, a): a", "is different from numpy index2 = index2.repeat(2, axis = 0).repeat(2, axis = 1)", "if l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different from", "= theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d + self.attr_features: 2 * self.d +", "= np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n", "be stored as tensor instead of list # we concatenate the edge index", "= grid[:, :] + 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid,", "0 for l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all =", "= self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample,", "for j in range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1:", "################################################# # # Utilities # ################################################# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')", "edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = 
torch.tensor((0, 0,", "x2 = grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2 = a[i2]", "x_j = x_i + x # if (xj, yj) is a valid node", "= sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :] =", "= self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 =", "1): edge_index.append((i, i + n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i + n_x, i)) edge_attr.append((0,", "'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif grid == 'grid_edge_aug': X,", "# if (xj, yj) is a valid node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j])", "if theta is None: edge_attr = f(xy[:, 0:self.d], xy[:, self.d:]) else: theta =", "generate graph, with split and assemble with downsample class DownsampleGridSplitter(object): def __init__(self, grid,", "n_x, i)) if a != None: a1 = a[x, y] a2 = a[x,", "mask_index.shape) return (X, edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False):", "np.argmin(PWD, axis=2) edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index = pwd_index[np.where(pwd <= self.radius)] PWD_index", "2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X,", "= grid[:, 1] - 1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid,", "a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y != n_y - 1): d", "# out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate multi-level graph, with split", "graph, with split and assemble with downsample class 
DownsampleGridSplitter(object): def __init__(self, grid, resolution,", "Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0, self.r, (1,)) y = torch.randint(0, self.r, (1,))", "h_y_l = n_y // (2 ** l) n_l = h_x_l * h_y_l a", "np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) X = torch.tensor(grid,", "index = index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1)", "len(layers) - 1 assert self.n_layers >= 1 self.layers = nn.ModuleList() for j in", "= [] perm = torch.randperm(self.n) index = 0 for l in range(self.level): self.idx.append(perm[index:", "(xj, yj) is a valid node if is_periodic: x_j = x_j % s_l", "np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index +", "torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape,", "class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x could be in", "+ 1)) edge_index.append((i + 1, i )) if a != None: a1 =", "edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] =", "for l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:,", "= theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data = [] for x in", "-1)) edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d + 1]", "mymin) self.b = -self.a*mymax + high def encode(self, x): s = x.size() x", "edge_attr = torch.tensor(edge_attr, 
dtype=torch.float) return X, edge_index, edge_attr def multi_grid(depth, n_x, n_y, grid,", "= perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split", "n_y - 1): d = 1 / n_y a1 = a[x, y] a2", "def ball_connectivity(self, r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22", "graph for l in range(depth): h_x_l = n_x // (2 ** l) h_y_l", "edge_index_up_range def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = []", "axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2)", "= self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all]", "distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return", "num_examples = x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1),", "self.std[:,sample_idx]+ self.eps # T*batch*n mean = self.mean[:,sample_idx] # x is in shape of", "return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a, theta_all): # give a", "= self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf =", "def __init__(self, real_space, mesh_size, sample_size, induced_point): 
super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m =", "dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 +", "x/n_x, a2, a1)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index", "in range(depth): h_x_l = n_x // (2 ** l) h_y_l = n_y //", "update index edge_index_inner = edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X #", "edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l", "perm.reshape(self.num, self.m) for j in range(self.num): idx = perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample", "edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down,", "1): d = 1 / n_x a1 = a[x, y] a2 = a[x", "= torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r,", "torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l,", "= np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T def ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index", "then compute the interactive neighbors -- their parents are NN but they are", "return grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1] edge_attr", "assert len(out_list) == self.splits if is_cuda: pred = torch.zeros(self.n, ).cuda() else: pred =", "self.n *= mesh_size[j] self.grid = 
np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx = []", "j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for", "x in range(n_x): i = y * n_x + x if (x !=", "edge_index = [] edge_attr = [] for i1 in range(n): x1 = grid[i1]", "n_sub = Y_sub.shape[0] if self.m >= n_sub: m = self.m - n_sub perm", "RandomTwoMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m", "grid2[:, 1] = grid[:, 1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2", "NearestNeighbor if abs(x)>=2: # if their parents are NN if abs(x_i//2 - x_j//2)%(s_l//2)", "= np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index,", "diff_norms/y_norms def __call__(self, x, y): return self.rel(x, y) # A simple feedforward neural", "index = 0 for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd", "+ 1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2 *", "cuda: out = out.cuda() for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i", "self).__init__() self.d = len(real_space) self.ms = sample_sizes self.m = sample_sizes[0] self.level = level", "self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference =", "a != None: a1 = a[x] a2 = a[x + 1] edge_attr.append((x /", "-1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = 
self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1))", "print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1,", "np.tile(y2.reshape(1, n), [n, 1]) Y_diff = Y1 - Y2 return X_diff, Y_diff def", "edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 *", "= n_y // (2 ** l) n_l = h_x_l * h_y_l a =", "= None self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.edge_attr =", "= [] self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter =", ":4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]]", "edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d + 1] =", "in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2):", "out.reshape(-1,) # generate multi-level graph, with split and assemble class RandomMultiMeshSplitter(object): def __init__(self,", "i1)) edge_index.append((i1, i )) i2 = (x + 2) % n_x edge_index.append((i, i2))", "grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, ) X", "= [] self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] self.n_edges_inner =", "for i in range(self.splits): if i==0: idx, idx_all = self.sample(new_sample=True, index0=index) else: idx,", "f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # # generate two-level", "** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + 1, i)) edge_attr.append((d, a2,", "y): return self.rel(x, y) # A simple feedforward neural network class DenseNet(torch.nn.Module): def", "index = 
0 for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index =", "perm[self.m: self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both", "self.to_float: x = x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x) if self.to_cuda: x =", "pred_ij = pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if x==0: nx =", "split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s else: nx = self.s-1 if", "def get_grid(self): grid_out = [] for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out,", "x): x = (x - self.mean) / (self.std + self.eps) return x def", "self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out = [] edge_index_down_out = [] edge_index_up_out", "torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate graph on Torus, with split and assemble", "l(x) if j != self.n_layers - 1: x = torch.sin(x) return x #", "* self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if", "0.1) ** 2), np.exp(-(d / 0.01) ** 2))) X = torch.tensor(grid, dtype=torch.float) #", "sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long)", "theta=None): if f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1))", "a[x, y] a2 = a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2,", "i = y * n_x + x if (x != n_x - 1):", "= perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T,", "dtype=torch.long), n_edges def get_data(self, theta): theta_d = 
theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d)", "index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end if index0 < index_end: idx_all =", "+ self.attr_features: 2 * self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy =", "if y==0: ny = self.s else: ny = self.s-1 else: nx = self.s", "a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) **", "def __init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s = mesh_size[0] assert", "= np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges", "- s, n)) boundary3 = np.array(range(s, n, s)) boundary4 = np.array(range(2 * s", "-1, 0)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index =", "the interactive neighbors -- their parents are NN but they are not NearestNeighbor", "pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x could", "mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.splits = self.n // self.m", "= grid_edge(h_y_l, h_x_l, a) elif grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l,", "print('edge', edge_index_nn.shape) # we then compute the interactive neighbors -- their parents are", "y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return", "self).__init__() self.mean = torch.mean(x) self.std = torch.std(x) self.eps = eps def encode(self, x):", "= [] edge_index_out = [] edge_index_down_out = [] edge_index_up_out = [] index =", "x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x) if self.to_cuda: x = x.cuda() 
return x", "ntrain*n*T self.mean = torch.mean(x, 0) self.std = torch.std(x, 0) self.eps = eps def", "self.m = sample_size self.m_i = induced_point assert len(mesh_size) == self.d if self.d ==", "# pred_ij = pred_i[idx : idx + nx * ny] out[x::self.r, y::self.r] =", "torch.sum(all_norms) return all_norms def rel(self, x, y): num_examples = x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1)", "2 * self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug_full(n_x, n_y, r, a):", "yi), (xj, yj) not NearestNeighbor if abs(x)>=2: # if their parents are NN", "0 for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l]))", "len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T):", "theta=None): self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] if theta is", "real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx", "if out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def forward(self, x): for _, l in", "> self.n: self.m = self.n self.idx = np.array(range(self.n)) self.grid_sample = self.grid def sample(self):", "idx_all = self.sample(new_sample=True, index0=index) else: idx, idx_all = self.sample(new_sample=False, index0=index) index = (index", "edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta,", "<= self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges", "x in range(-3,4): x_j = x_i + x # if (xj, yj) is", "= theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = 
Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,)", "self.n if index < index_end: idx = self.perm[index: index_end] else: idx = torch.cat((self.perm[index:", "[] self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] self.n_edges_inner = []", "tensor instead of list # we concatenate the edge index list and label", "edge_attr_up = self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up,", "out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data,", "for x in range(n_x): i = x i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1,", "inter-graph edge if l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is", "self.num = self.n // self.m # number of sub-grid def get_data(self, theta, edge_features=1):", "file_path self._load_file() def read_field(self, field): x = self.data[field] if not self.old_mat: x =", "= int(resolution/r) self.r = r self.n = resolution**2 self.m = m self.radius =", "grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split", "theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1]", "np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2", "= np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x))", "= np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features)) edge_attr[:, 0:2 * self.d] = 
self.grid_sample[self.edge_index.T].reshape((self.n_edges,", "cuda=False): assert len(pred) == len(split_idx) assert len(pred) == self.num * self.l // batch_size2", "edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta =", "index_end]), dim=0) self.idx_all = idx_all self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self):", "multi_grid(depth, n_x, n_y, grid, params): edge_index_global = [] edge_attr_global = [] X_global =", "= self.sample(new_sample=False, index0=index) index = (index + self.m) % self.n grid, grid_all =", "X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:,", "is None: std = self.std + self.eps # n mean = self.mean else:", "= Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X, y=Y_split, edge_index=edge_index,", "self.resolution,self.resolution)) for t in range(self.T): for i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m)", "-1)) edge_attr[:, 2 * self.d : 2 * self.d + self.attr_features] = theta[self.edge_index[0]].view(-1,", "range(n_x): i = y * n_x + x if (x != n_x -", "GaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std = torch.std(x)", "torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X, X_l], dim=1) X_global.append(X) # construct edges", "parents are NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long)", "= pwd_index[np.where(pwd <= 
self.radius)] PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index)", "self.resolution%2==1: if x==0: nx = self.s else: nx = self.s-1 if y==0: ny", "edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:, 3", "h5py import sklearn.metrics from torch_geometric.data import Data import torch.nn as nn from scipy.ndimage", "self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float))", "def __init__(self, grid, resolution, d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid = grid", "self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l],", "self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i =", "boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size = len(boundary) vertice1 = np.array(range(self.n)) vertice1 =", "index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out =", "split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx,", "torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1)", "class RandomGridSplitter(object): def __init__(self, grid, resolution, d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid", "1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up = 
np.zeros((self.n_edges_inter[l], 2", "+ self.grid_sample[l].shape[0] index = 0 for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1])", "torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size, l): data = data.reshape(-1, grid_size, grid_size)", "with split and assemble class RandomGridSplitter(object): def __init__(self, grid, resolution, d=2, m=200, l=1,", "self.eps # T*batch*n mean = self.mean[:,sample_idx] # x is in shape of batch*n", "= a[x] a2 = a[x + 1] edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x", "x def decode(self, x): s = x.size() x = x.view(s[0], -1) x =", "grid4 = grid grid4[:, 0] = grid[:, 0] + 1 grid4[:, 1] =", "loss class LpLoss(object): def __init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and", "= [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid", "new_sample=True, index0=0): self.idx = [] self.grid_sample = [] if (new_sample) or (self.perm is", "= x.view(s[0], -1) x = (x - self.b)/self.a x = x.view(s) return x", "sample_size self.attr_features = attr_features assert len(mesh_size) == self.d if self.d == 1: self.n", "edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def", "= self.m - grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx]", "= grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1)", "[] for i in range(self.l): perm = torch.randperm(self.n) perm = perm.reshape(self.num, self.m) for", "= Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, 
theta_sample], dim=0)", "edge_index.append((i + n_x, i)) edge_attr.append((0, -1, 0)) X = torch.tensor(grid, dtype=torch.float) # Exact", "else: theta = theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2 *", "index0=index) index = (index + self.m) % self.n grid, grid_all = self.get_grid() edge_index,", "def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m =", "index_sub.reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference =", "np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long) if edge_features ==", "= theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2 * self.d] =", ")) if a != None: a1 = a[x, y] a2 = a[x +", "self.edge_index_boundary == None: # self.boundary_connectivity2d() if f is None: if theta is None:", "-1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3", "+ 2)) edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:,", "xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel()", "-1) x = (x - self.b)/self.a x = x.view(s) return x #loss function", "self.to_torch = to_torch self.to_cuda = to_cuda self.to_float = to_float self.file_path = file_path self.data", "0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1,", "** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d)", "ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = 
self.edge_index.shape[1]", "1] - 1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4) PWD", "dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features]", "xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in", "edge_attr = [] for x in range(n_x): i = x i1 = (x+1)%n_x", "return data def sample(self, theta, Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution,", "np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d,", "edge if l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different", "theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution)", "None: a = a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x) # xs = np.array(range(n_x))", "Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def sampleT(self, theta, Y, params=None): theta_d =", "grid_size, grid_size) data = data[:, ::l, ::l] data = data.reshape(-1, (grid_size // l)", "theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0,", "pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd", "def sample(self, theta, Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y", "[] for y in range(n_y): for x in range(n_x): i = y *", "to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda = to_cuda self.to_float =", "== 
'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) # update index edge_index_inner", "2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]]", "self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges =", "as nn from scipy.ndimage import gaussian_filter ################################################# # # Utilities # ################################################# device", "a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact,", "# self.y = y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1: self.s = int(((resolution", "edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in order", "self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges,", "torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample", "= [] self.n_edges_inter = [] def sample(self): self.idx = [] self.grid_sample = []", "= a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if", "self.d = len(real_space) self.m = sample_sizes self.level = level assert len(sample_sizes) == level", "downsample class DownsampleGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__()", "= self.s else: nx = self.s-1 if y==0: ny = self.s else: ny", "dtype=torch.long) if 
edge_features == 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr = np.zeros((n_edges,", "= np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:,", "domain class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s", "self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std =", "np.meshgrid(*grids)]).T def ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges", "* self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d +", "torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample", "x = l(x) if j != self.n_layers - 1: x = torch.sin(x) return", "torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph,", "** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) if", "torch.zeros(self.n, ) for i in range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred # generate", "yj) not NearestNeighbor if abs(x)>=2: # if their parents are NN if abs(x_i//2", "gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size, l):", "X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d,", "= to_cuda def set_torch(self, to_torch): self.to_torch = to_torch def set_float(self, to_float): self.to_float =", "Y_diff0 = self.pairwise_difference(grid, grid) grid1 = grid 
grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid,", "sample sub-grids, here we downsample sub-grids self.grid = grid.reshape(resolution, resolution,2) # self.theta =", "class TorusGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter,", "# out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) # out = torch.tensor(out, dtype=torch.float) return", ": idx + nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out", "l) n_l = h_x_l * h_y_l a = downsample(params, n_x, (2 ** l))", "self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0,", "np.exp(-(d / 0.01) ** 2))) if (y != n_y - 1): d =", "downsample(params, n_x, (2 ** l)) if grid == 'grid': X, edge_index_inner, edge_attr_inner =", "number of sub-grid if self.splits * self.m < self.n: self.splits = self.splits +", "def sampleT(self, theta, Y, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d)", "x_i + x # if (xj, yj) is a valid node if is_periodic:", "= (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i )) i2 = (x + 2) %", "h_y_l a = downsample(params, n_x, (2 ** l)) if grid == 'grid': X,", "theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a", ":].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0]", "torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if", "pred_ij out = out / self.l # out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0)", 
"################################################# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reading data class MatReader(object):", "self.boundary[::stride] boundary_size = len(boundary) vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2 =", "// batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T): for i in", "= pred_i[idx : idx + nx * ny] out[t, x::self.r, y::self.r] = pred_ij[:nx", "data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape,", "np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx = [] self.idx_all = None self.grid_sample =", "edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:, 0:2 * self.d] =", "= np.tile(x2.reshape(1, n), [n, 1]) X_diff = X1 - X2 Y1 = np.tile(y1.reshape(n,", "* 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1,", "= (x + 2) % n_x edge_index.append((i, i2)) edge_index.append((i2, i )) if a", "range(1, level+1): r_l = 2 ** (l - 1) s_l = s //", "sampling class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d =", "import h5py import sklearn.metrics from torch_geometric.data import Data import torch.nn as nn from", "[] for x_i in range(s_l): for x in range(-3,4): x_j = x_i +", "3 + self.edge_features * 2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges,", "self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def", "Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, 
grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample],", "self.d)) edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1]", "n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the finest level,", "= r self.n = resolution**2 self.m = m self.radius = radius self.edge_features =", "in range(self.T): for i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i]", "gaussian class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x could be", "1.0, n_x) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index = []", "return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down = []", "** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i2,", "s // r_l n_l = s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l) grid_l", "self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.edge_attr = [] self.edge_attr_down", "with split and assemble with downsample class DownsampleGridSplitter(object): def __init__(self, grid, resolution, r,", "= torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index =", "self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0]", "= nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def 
forward(self, x): for j,", "def assemble(self, pred, split_idx, batch_size2, sigma=1, cuda=False): assert len(pred) == len(split_idx) assert len(pred)", "sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1]", "theta_l = theta_l.reshape(N, n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for", "edge_index.shape, edge_attr.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1, cuda=False): assert len(pred)", "a1 = a[x, y] a2 = a[x + 1, y] edge_attr.append((x / n_x,", "theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx] edge_attr =", "x, y): num_examples = x.size()[0] diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms", "to_torch): self.to_torch = to_torch def set_float(self, to_float): self.to_float = to_float # normalization, pointwise", "x, y): return self.rel(x, y) # A simple feedforward neural network class DenseNet(torch.nn.Module):", "2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l in range(self.level): edge_attr =", "self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float))", "Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2)", "sample_idx=index_split) else: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape,", "edge_index_nn.shape) # we then compute the interactive neighbors -- their parents are NN", "self).__init__() #Dimension and Lp-norm type are 
postive assert d > 0 and p", "1 / n_y a1 = a[x, y] a2 = a[x, y+1] edge_index.append((i, i", "edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1] =", "Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >= n_sub:", "nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma,", "1 grid4[:, 1] = grid[:, 1] - 1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4,", "radius_inner, radius_inter, theta_a, theta_all): # give a test mesh, generate a list of", "= theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, ) X = torch.cat([grid_split,", "else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None: edge_attr = f(xy[:, 0:self.d],", "1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j !=", "edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long) if", "theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # # generate two-level graph class RandomTwoMeshGenerator(object): def __init__(self,", "self.splits = self.n // self.m # number of sub-grid if self.splits * self.m", "= self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx]", "self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both =", "edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 *", "self.splits + 1 print('n:',self.n,' m:',self.m, ' number of splits:', self.splits ) self.perm =", "1] = grid[:, 1] - 1 pwd4 = 
sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 =", "return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference def get_data(self, theta, params=None): theta_d =", "ntrain*n or ntrain*T*n or ntrain*n*T self.mean = torch.mean(x, 0) self.std = torch.std(x, 0)", "n_x, i)) edge_attr.append((0, -1, 0)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact,", "if self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms def rel(self, x, y):", "X.shape, edge_index.shape, edge_attr.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1, cuda=False): assert", "// (2 ** l) h_y_l = n_y // (2 ** l) n_l =", "import scipy.io import h5py import sklearn.metrics from torch_geometric.data import Data import torch.nn as", "= torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1", "None: a1 = a[x] a2 = a[x + 1] edge_attr.append((x / n_x, a1,", "def get_edge_index_range(self): # in order to use graph network's data structure, # the", "= torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter", "with sampling class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d", "= self.mean.cuda() self.std = self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu()", "return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22, dtype=torch.float) #", ") if cuda: out = out.cuda() for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2,", "2), np.exp(-(d / 0.01) ** 
2))) X = torch.tensor(grid, dtype=torch.float) # Exact =", "X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4 = grid grid4[:, 0] = grid[:, 0]", "not self.old_mat: x = x[()] x = np.transpose(x, axes=range(len(x.shape) - 1, -1, -1))", "in np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.grid_sample", "params==None: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X,", "self).__init__() mymin = torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a = (high -", "= [] for y in range(n_y): for x in range(n_x): i = y", "x1 = grid[i1] for i2 in range(n): x2 = grid[i2] d = np.linalg.norm(x1-x2)", "and assemble class RandomGridSplitter(object): def __init__(self, grid, resolution, d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter,", "out = out / self.l # out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) #", "l in range(depth): h_x_l = n_x // (2 ** l) h_y_l = n_y", "else: theta = theta[self.idx] edge_attr = f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return", "self.idx_all def get_grid(self): grid_out = [] for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return", "+ self.eps)) + self.mean return x def cuda(self): self.mean = self.mean.cuda() self.std =", "= a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data", "pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2)", "+ self.b x = x.view(s) return x def decode(self, x): s = x.size()", "h_x_l * h_y_l a = downsample(params, n_x, (2 ** l)) if grid ==", "mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create multi_grid with size:', X.shape, 
edge_index.shape,", "for j in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx = split_idx_i[j,:].reshape(-1,) out[idx] = out[idx]", "s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l) grid_l = xs grid_l = torch.tensor(grid_l,", "self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1] =", "self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self): self.idx =", "def ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges", "- 1 self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner =", "__init__(self, real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size", "if is_periodic: x_j = x_j % s_l if (x_j in range(s_l)): # if", "f=None, theta=None): if f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1))", "= np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:,", "grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4],", "self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]]", "theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with sampling class RandomMeshGenerator(object): def", "torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, 
edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X,", "-1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub,", "theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3", "torch.tensor(edge_attr_21, dtype=torch.float), \\ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph class RandomMultiMeshGenerator(object): def __init__(self,", "self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2,", "each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2),", "all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(all_norms)", "* self.d + 2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 *", "dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge(n_x, n_y,", "= a[x, y+1] edge_index.append((i, i + n_x)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1", "grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) # update index", "edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph, with split", "mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate graph on Torus,", "super(DenseNet, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1 self.layers =", "= theta[self.idx_all] for l in range(self.level): edge_attr = 
np.zeros((self.n_edges_inner[l], 2 * self.d +", "for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers - 1: if", "self.grid_sample.append(self.grid[idx]) index = index_end if index0 < index_end: idx_all = self.perm[index0: index_end] else:", "grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub,", "= grid1.shape[0] x1 = grid1[:,0] y1 = grid1[:,1] x2 = grid2[:,0] y2 =", "theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data = [] for x", "pwd = np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return", "theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X", "2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:, 0:2 * self.d]", "1.0, n_y) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = []", "of list (time seq) assert len(pred) == len(split_idx) assert len(pred[0]) == self.T assert", "= grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:, 4 + self.edge_features:", "edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then compute the interactive", "torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None): if f is None: if theta is", "= self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def", "self.radius)] PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance = PWD[PWD_index]", "edge_attr, 
edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index,", "self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out =", "self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x): for", "else: edge_attr = np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0] edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1)", "r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample)", "a[x] a2 = a[x + 1] edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x /", "n_x edge_index.append((i, i + 1)) edge_index.append((i + 1, i )) if a !=", "return X, edge_index, edge_attr def grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x, n_y) xs", "= grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD,", "index = index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def", "= torch.tensor(range(n_l), dtype=torch.long) index1 = index1 + num_nodes num_nodes += n_l # #construct", "self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 =", "else: theta = theta[self.idx_all] for l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 *", "self).__init__() self.d = len(real_space) self.m = sample_size self.attr_features = attr_features assert len(mesh_size) ==", "boundary_size = len(boundary) vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary,", "1) print(level) for l in 
range(1, level+1): r_l = 2 ** (l -", "finest level, we construct the nearest neighbors (NN) if l==1: edge_index_nn = []", "= True except: self.data = h5py.File(self.file_path) self.old_mat = False def load_file(self, file_path): self.file_path", "self.file_path = file_path self._load_file() def read_field(self, field): x = self.data[field] if not self.old_mat:", "len(radius_inner) == self.level assert len(radius_inter) == self.level - 1 self.edge_index = [] self.edge_index_down", "theta = theta[self.idx_all] for l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d", "grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr = np.zeros((n_edges, 2*self.d+2)) a = theta_sample[:,0] edge_attr[:, :2*self.d] =", "= gaussian_filter(out, sigma=sigma, mode='constant', cval=0) # out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) #", "= torch.randperm(self.n) index = index0 for l in range(self.level): index = index %", "are postive assert d > 0 and p > 0 self.d = d", "Torus, with split and assemble class TorusGridSplitter(object): def __init__(self, grid, resolution, r, m=100,", "self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:]", "data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def assemble(self,", "self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample)", "index_split.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx)", "= np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self):", "4 + 
self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr,", "a[x, y+1] edge_index.append((i, i + n_x)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 *", "torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1)", "sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index", "def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx", "= torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self,", "= np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel() for", "edge_attr[:, 0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d : 2", "[] self.edge_attr_up = [] if theta is None: for l in range(self.level): edge_attr", "<= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22", "= theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N, n_l,", "boundary = self.boundary[::stride] boundary_size = len(boundary) vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size)", "= n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1]", "idx, idx_all = self.sample(new_sample=False, index0=index) index = (index + self.m) % self.n grid,", "/ 0.01) ** 2))) X = torch.tensor(grid, dtype=torch.float) # Exact = 
torch.tensor(Exact, dtype=torch.float).view(-1)", "is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] if", "np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 =", "self.n % self.m == 0 self.num = self.n // self.m # number of", "2 * self.d + 1] = theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d))", "self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m) for j in range(batch_size2): pred_ij = pred_i[j,:].reshape(-1,) idx", "in range(self.level): index = index % self.n index_end = (index+self.ms[l]) % self.n if", "<= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0]", "dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out", "function with rel/abs Lp loss class LpLoss(object): def __init__(self, d=2, p=2, size_average=True, reduction=True):", "1.0, n_y) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel()", "= grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1, n]) X2 = np.tile(x2.reshape(1, n), [n,", "+ self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges", "= [] index = 0 for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index", "= torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a = (high - low)/(mymax -", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, 
edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data),", "self.s = int(resolution/r) self.r = r self.n = resolution**2 self.m = m self.radius", "= n_x * n_y xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0,", "pwd_index = pwd_index[np.where(pwd <= self.radius)] PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1],", "np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + n_x,", "self.std.cpu() # normalization, scaling by range class RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0):", "super(RandomGridSplitter, self).__init__() self.grid = grid self.resolution = resolution self.n = resolution**d self.d =", "pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid,", "rel/abs Lp loss class LpLoss(object): def __init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__()", "self.n = 1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j]))", "else: idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all = idx_all self.grid_sample_all = self.grid[self.idx_all]", "theta_sample = theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd <=", "pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4 = grid grid4[:,", "return X, edge_index, edge_attr def multi_grid(depth, n_x, n_y, grid, params): edge_index_global = []", "size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta,", "edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]] edge_attr[:, 3] = 
theta[edge_index[1]]", "data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape,", "is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12,", "dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2", "= theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x =", "edge_attr.shape, index_split.shape) return data def sampleT(self, theta, Y, params=None): theta_d = theta.shape[1] theta", "y / n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y != n_y", "a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X = torch.tensor(grid,", "data = [] index = 0 for i in range(self.splits): if i==0: idx,", "reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm type are postive assert d > 0", "= self.pairwise_difference(grid, grid2) grid3 = grid grid3[:, :] = grid[:, :] + 1", "= Y_sub.shape[1] if self.m >= n_sub: m = self.m - n_sub perm =", "= np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different from numpy index2 = index2.repeat(2, axis", "theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # # generate two-level graph class RandomTwoMeshGenerator(object): def", "= torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global,", "* h_y_l a = downsample(params, n_x, (2 ** l)) if 
grid == 'grid':", "X, edge_index, edge_attr def grid_edge_aug_full(n_x, n_y, r, a): n = n_x * n_y", "/ 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) if (y != n_y", "self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index = 0 for", "self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1)", "= np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape(", "m self.T = T self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n),", "= None self.idx = [] self.idx_all = None self.grid_sample = [] self.grid_sample_all =", "Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1)", "class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers)", "torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample)", "edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split,", "pred is a list (batches) of list (time seq) assert len(pred) == len(split_idx)", "np.tile(x1.reshape(n, 1), [1, n]) X2 = np.tile(x2.reshape(1, n), [n, 1]) X_diff = X1", "x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = 
edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter',", "def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self,", "# pred is a list (batches) of list (time seq) assert len(pred) ==", "data[:, ::l, ::l] data = data.reshape(-1, (grid_size // l) ** 2) return data", "edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta): theta_d = theta.shape[1] theta =", "edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index =", "theta_list.append(theta_l) # for the finest level, we construct the nearest neighbors (NN) if", "X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges,", "np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.grid_sample =", "s_l if (x_j in range(s_l)): # if (xi, yi), (xj, yj) not NearestNeighbor", "edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx))", "dtype=torch.float) def attributes(self, f=None, theta=None): if f is None: if theta is None:", "* self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d + 1] = theta[self.edge_index_22[1]] return", "3 * self.d)) edge_attr_12[:, 0:2 * self.d] = 
self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 *", "pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index,", "return x class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers", "= np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd =", "self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges,", "self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >= n_sub: m = self.m -", "a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features]", "self.d)) edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d] =", "[] edge_attr = [] for i1 in range(n): x1 = grid[i1] for i2", "s_l) grid_l = xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N,", "n, s)) boundary4 = np.array(range(2 * s - 1, n, s)) self.boundary =", "l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:, 0:2", "edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid,", "= torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reading data class MatReader(object): def 
__init__(self,", "self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps # T*batch*n mean =", "x = x.view(s[0], -1) x = self.a*x + self.b x = x.view(s) return", "pred_i[idx : idx + nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny)", "with rel/abs Lp loss class LpLoss(object): def __init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss,", "dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2],", "np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]] edge_attr[:, 3]", "super(DownsampleGridSplitter, self).__init__() # instead of randomly sample sub-grids, here we downsample sub-grids self.grid", "= torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1, ) X = torch.cat([grid_split,", "self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter", "2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1] = theta[self.edge_index_down[l][1]]", "if self.to_torch: x = torch.from_numpy(x) if self.to_cuda: x = x.cuda() return x def", "= (x - self.b)/self.a x = x.view(s) return x #loss function with rel/abs", "self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:, 0:2", "diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if", "= theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2 * self.d] =", "grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 =", 
"edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1): edge_attr_down =", "= self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if self.m >= n_sub: m =", "= n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index =", "= a[x + 1] edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x / n_x, a2,", "dim=1) X_global.append(X) # construct edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1 = index1 +", "grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:, ::r_l, :] theta_l =", "dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph, with", "def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1) mymax =", "y==0: ny = self.s else: ny = self.s-1 else: nx = self.s ny", "np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d /", "n_y // (2 ** l) n_l = h_x_l * h_y_l a = downsample(params,", "np.array(range(0, s)) boundary2 = np.array(range(n - s, n)) boundary3 = np.array(range(s, n, s))", "[] def sample(self, new_sample=True, index0=0): self.idx = [] self.grid_sample = [] if (new_sample)", "0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 * self.d]", "self.s = int(((resolution - 1)/r) + 1) else: self.s = int(resolution/r) self.r =", "self.b = -self.a*mymax + high def encode(self, x): s = x.size() x =", "= theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary = f(xy[:,0:self.d],", "None: if theta is None: edge_attr_boundary = 
self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2))", "self.idx, self.idx_all def get_grid(self): grid_out = [] for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float))", "grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4", "y1 = grid1[:,1] x2 = grid2[:,0] y2 = grid2[:,1] X1 = np.tile(x1.reshape(n, 1),", "= a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]]", "sampleT(self, theta, Y, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y", "i + n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i + n_x, i)) edge_attr.append((0, -1, 0))", "n_x a1 = a[x, y] a2 = a[x + 1, y] edge_index.append((i, i", "1) if self.reduction: if self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms def", "gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index =", "* self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr,", "= [] edge_attr = [] for i1 in range(n): x1 = grid[i1] for", "if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1)", "out = torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T): for i in range(len(pred)): pred_i", "with split and assemble class RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter,", "dtype=torch.float) # Exact = torch.tensor(Exact, 
dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr =", "theta[self.idx_all] for l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d + 2))", "1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n =", "def torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = self.pairwise_difference(grid, grid) grid1", "[] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self): self.idx = [] self.grid_sample", "self.grid_sample = [] if (new_sample) or (self.perm is None): self.perm = torch.randperm(self.n) index", "torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr", "\\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None): if", "xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs =", ")) if a != None: a1 = a[x] a2 = a[x + 1]", "edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out", "edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1)", "self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 =", "= torch.randperm(self.n) index = 0 for l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index", "if (is_high): # X = 
torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l, (depth -", "self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r): grid = self.grid_sample pwd0", "range class RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin = torch.min(x,", "self).__init__() self.d = len(real_space) self.m = sample_sizes self.level = level assert len(sample_sizes) ==", "= theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, )", "self.n = resolution**2 self.m = m self.radius = radius self.edge_features = edge_features self.index", "Y2 return X_diff, Y_diff def torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0", "dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1, ) X", "= gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred,", "= None self.grid_sample = [] self.grid_sample_all = None self.edge_index = [] self.edge_index_down =", "in range(s_l)): # if (xi, yi), (xj, yj) not NearestNeighbor if abs(x)>=2: #", "1 self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner = []", "data def assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list) == self.splits if is_cuda: pred", "pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return", "nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >=", "= torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i]", "i 
)) if a != None: a1 = a[x, y] a2 = a[x", "self.y = y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1: self.s = int(((resolution -", "-1)) else: theta = theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2", "self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r):", "y] a2 = a[x, y+1] edge_index.append((i, i + n_x)) edge_attr.append((d, a1, a2, 1", "self.splits ) self.perm = None self.idx = [] self.idx_all = None self.grid_sample =", "self).__init__() self.d = len(real_space) self.m = sample_size self.m_i = induced_point assert len(mesh_size) ==", "int(np.log2(s) - 1) print(level) for l in range(1, level+1): r_l = 2 **", "= self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary =", "xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary", "Lp-norm type are postive assert d > 0 and p > 0 self.d", "idx, idx_all = self.sample(new_sample=True, index0=index) else: idx, idx_all = self.sample(new_sample=False, index0=index) index =", "a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1)", "edge_index.append((i + n_x, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d)", "index = (index + self.m) % self.n grid, grid_all = self.get_grid() edge_index, edge_index_down,", "self.resolution) def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges", "X2 = np.tile(x2.reshape(1, n), [n, 1]) X_diff = X1 - X2 Y1 =", "/ (x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - 
y.view(num_examples,-1), self.p, 1) if self.reduction:", "+ high def encode(self, x): s = x.size() x = x.view(s[0], -1) x", "self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges =", "j != self.n_layers - 1: x = torch.sin(x) return x # generate graphs", "ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges =", "self.data = h5py.File(self.file_path) self.old_mat = False def load_file(self, file_path): self.file_path = file_path self._load_file()", "= a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X =", "torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r,", "self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta): theta_d =", "from numpy index2 = index2.repeat(2, axis = 0).repeat(2, axis = 1) index2 =", "gaussian_filter(out, sigma=sigma, mode='constant', cval=0) # out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate", "= [] def sample(self, new_sample=True, index0=0): self.idx = [] self.grid_sample = [] if", "* 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index,", "= np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = []", "return (X, edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False): grid_list", "A simple feedforward neural network class DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False):", "xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None: 
edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr", "torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr", "np.array(range(n_y)) edge_index = [] edge_attr = [] for x in range(n_x): i =", "edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X # if (is_high): # X", "== 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n", "self.edge_features] = a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4 + self.edge_features * 2] =", "sigma=1): # pred is a list (batches) of list (time seq) assert len(pred)", "for l in range(1, level+1): r_l = 2 ** (l - 1) s_l", "self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.grid_sample = self.grid def", "n_edge_index = 0 for l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0]", "self.data = None self.old_mat = None self._load_file() def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path)", "num_nodes += n_l # #construct inter-graph edge if l != depth-1: index2 =", "< index_end: idx = self.perm[index: index_end] else: idx = torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0)", "= self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 *", "[] self.n_edges_inter = [] def sample(self): self.idx = [] self.grid_sample = [] perm", "= theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:,", "num_nodes num_nodes += n_l # #construct inter-graph edge if l != depth-1: index2", "h_x_l, a) 
# update index edge_index_inner = edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) #", "def assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx) assert len(pred) ==", "x = x.view(s) return x def decode(self, x): s = x.size() x =", "grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <=", "for l in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1]", "+ self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index = 0 for l in range(self.level-1):", "self.d + self.attr_features: 2 * self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy", "torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a =", "1, y] edge_attr.append((x / n_x, y / n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2,", "False def load_file(self, file_path): self.file_path = file_path self._load_file() def read_field(self, field): x =", "_load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data = h5py.File(self.file_path) self.old_mat", "edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d + 1] =", "0 for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <=", "x if (x != n_x - 1): d = 1 / n_x edge_index.append((i,", "r, a): n = n_x * n_y xs = np.linspace(0.0, 1.0, n_x) ys", "self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2", "np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different from numpy index2 = 
index2.repeat(2, axis =", "if self.reduction: if self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms def rel(self,", "n]) Y2 = np.tile(y2.reshape(1, n), [n, 1]) Y_diff = Y1 - Y2 return", "> 0 and p > 0 self.d = d self.p = p self.reduction", "in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index)", "valid node if is_periodic: x_j = x_j % s_l if (x_j in range(s_l)):", "edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred)", "self.m = sample_sizes[0] self.level = level assert len(sample_sizes) == level assert len(mesh_size) ==", "theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1, )", "grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1)", "= X1 - X2 Y1 = np.tile(y1.reshape(n, 1), [1, n]) Y2 = np.tile(y2.reshape(1,", "axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None, theta=None): # if", "and assemble with downsample class DownsampleGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15,", "self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out = [] edge_index_down_out", "mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx =", "theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, 
edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up,", "__init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda = to_cuda", "(self.std + self.eps)) + self.mean return x def cuda(self): self.mean = self.mean.cuda() self.std", "theta_d) theta_l = theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N, n_l, theta_d) theta_l =", "torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge(n_x, n_y, a=None): if a !=", "self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def get_grid(self): return", "= x[()] x = np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) if self.to_float: x", "data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self,", "radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n", "<= self.radius)) n_edges = edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long) if edge_features == 0:", "edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return", "we downsample sub-grids self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) #", "list of data data = [] index = 0 for i in range(self.splits):", "1)) else: self.n = 1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0],", "self.attr_features) else: xy = 
self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None: edge_attr = f(xy[:,", "if edge_features == 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr = np.zeros((n_edges, 2*self.d+2))", "y * n_x + x if (x != n_x - 1): edge_index.append((i, i", "self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d]", "+ num_nodes num_nodes += n_l # #construct inter-graph edge if l != depth-1:", "range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index +", "theta_split = theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, ) X =", "= perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n,", "node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1)", "class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space)", "<= self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta): theta_d", "dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size, l): data = data.reshape(-1, grid_size, grid_size) data", "= (index+self.ms[l]) % self.n if index < index_end: idx = self.perm[index: index_end] else:", "Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float)", "level+1): r_l = 2 ** (l - 1) s_l = s // r_l", "edge_attr[:, :4] = 
grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:, 4", "x in range(n_x): i = x i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i", "self.n_layers - 1: x = torch.sin(x) return x # generate graphs on square", "depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different from numpy index2 =", "= to_float self.file_path = file_path self.data = None self.old_mat = None self._load_file() def", "xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for i1 in", "if index0 < index_end: idx_all = self.perm[index0: index_end] else: idx_all = torch.cat((self.perm[index0:], self.perm[:", "a1 = a[x, y] a2 = a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y,", "self.attr_features) edge_attr[:, 2 * self.d + self.attr_features: 2 * self.d + 2*self.attr_features] =", "* (self.std + self.eps)) + self.mean return x def cuda(self): self.mean = self.mean.cuda()", "= np.array(range(n_y)) edge_index = [] edge_attr = [] for x in range(n_x): i", "n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample =", "= mesh_size[0] assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0]", "+ self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index +", "# generate multi-level graph class RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator,", "edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] =", "data def assemble(self, pred, split_idx, batch_size2, sigma=1, 
cuda=False): assert len(pred) == len(split_idx) assert", "self).__init__() self.grid = grid self.resolution = resolution self.n = resolution**d self.d = d", "range(-3,4): x_j = x_i + x # if (xj, yj) is a valid", "= sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long)", "self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1] =", "x def decode(self, x, sample_idx=None): x = (x * (self.std + self.eps)) +", "induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.m_i = induced_point assert", "self.d + 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \\ torch.tensor(edge_attr_12, dtype=torch.float), \\ torch.tensor(edge_attr_21,", "= self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l],", "self.n boundary1 = np.array(range(0, s)) boundary2 = np.array(range(n - s, n)) boundary3 =", "np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long)", "= self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges", "% self.n if index < index_end: idx = self.perm[index: index_end] else: idx =", "y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >= n_sub: m", "* self.d] = 
theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down,", "// batch_size2 out = torch.zeros(self.n, ) if cuda: out = out.cuda() for i", "return X, edge_index, edge_attr def grid_edge(n_x, n_y, a=None): if a != None: a", "= theta[self.idx] edge_attr = np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features)) edge_attr[:, 0:2 *", "[] for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self,", "is a valid node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long)", "in range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1) return pred # generate graph, with split and", "edge_index_down_out = [] edge_index_up_out = [] index = 0 for l in range(self.level):", "if a != None: a1 = a[x, y] a2 = a[x + 1,", "dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a", "else: if len(self.mean.shape) == len(sample_idx[0].shape): std = self.std[sample_idx] + self.eps # batch*n mean", "= self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta", "2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index,", "= self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] =", "theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1] = 
theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2", "print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1]", "<= r22)) + self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1]", "edge_index = [] edge_attr = [] for x in range(n_x): i = x", "(x - self.mean) / (self.std + self.eps) return x def decode(self, x, sample_idx=None):", "grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split", "x): for j, l in enumerate(self.layers): x = l(x) if j != self.n_layers", "+ 1 grid4[:, 1] = grid[:, 1] - 1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4)", "= d self.m = m self.l = l self.radius = radius assert self.n", "/ 0.01) ** 2))) edge_index.append((i + 1, i)) edge_attr.append((d, a2, a1, 1 /", "self.mean.cpu() self.std = self.std.cpu() # normalization, scaling by range class RangeNormalizer(object): def __init__(self,", "+ index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index =", "len(radius_inter) == self.level - 1 self.edge_index = [] self.edge_index_down = [] self.edge_index_up =", "return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph, with split and assemble class RandomGridSplitter(object):", "= 1.0 / (x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1)", "x = (x * (self.std + self.eps)) + self.mean return x def cuda(self):", "x, y = split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s else: nx", "pwd = 
sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges", "= torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l, (depth - 1 - l) *", "edge_attr[:, 2 * self.d : 2 * self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features)", "mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_sizes self.level =", "num_examples = x.size()[0] #Assume uniform mesh h = 1.0 / (x.size()[1] - 1.0)", "def attributes(self, f=None, theta=None): if f is None: if theta is None: edge_attr", "get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11,", "theta[self.idx] edge_attr = np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features)) edge_attr[:, 0:2 * self.d]", "for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x): for j, l in", "= [] self.idx_all = None self.grid_sample = [] self.grid_sample_all = None self.edge_index =", "def sample(self, new_sample=True, index0=0): self.idx = [] self.grid_sample = [] if (new_sample) or", "dtype=torch.float) def get_boundary(self): s = self.s n = self.n boundary1 = np.array(range(0, s))", "__init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers) - 1 assert", "*= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.splits = self.n //", "edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1)", "self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # # 
generate two-level graph class RandomTwoMeshGenerator(object):", "theta_d) data = [] for x in range(self.r): for y in range(self.r): grid_sub", "x_j % s_l if (x_j in range(s_l)): # if (xi, yi), (xj, yj)", "1 / n_x a1 = a[x, y] a2 = a[x + 1, y]", "= torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return", "out.cuda() for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i].reshape(batch_size2, self.m)", "grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X", "np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0]", "theta is None: for l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for", "- grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split =", "torch.zeros(self.n, ) if cuda: out = out.cuda() for i in range(len(pred)): pred_i =", "x.view(s[0], -1) x = self.a*x + self.b x = x.view(s) return x def", "grid, params): edge_index_global = [] edge_attr_global = [] X_global = [] num_nodes =", "np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :] = edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index)", "theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution) x =", "index1 = index1 + num_nodes num_nodes += n_l # #construct inter-graph edge if", "if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, 
edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test',", "self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2)", "dtype=torch.float) # generate graphs with sampling class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size,", "assemble class TorusGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, T=None, edge_features=1, ):", "1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) # ys", "grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l =", "0.1) ** 2), np.exp(-(d / 0.01) ** 2))) if (y != n_y -", "= a[x + 1, y] edge_index.append((i, i + 1)) edge_attr.append((d, a1, a2, 1", "2 * self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d + 2))", "self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long)", "x, y): num_examples = x.size()[0] #Assume uniform mesh h = 1.0 / (x.size()[1]", "low)/(mymax - mymin) self.b = -self.a*mymax + high def encode(self, x): s =", "% self.n grid, grid_all = self.get_grid() edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range,", "in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx", "= torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx,", "= theta.reshape(self.n, -1)[idx] Y_sample = 
Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split", "x.view(s[0], -1) x = (x - self.b)/self.a x = x.view(s) return x #loss", "in (-1,1): x_j = x_i + x if is_periodic: x_j = x_j %", "on Torus, with split and assemble class TorusGridSplitter(object): def __init__(self, grid, resolution, r,", "# T*batch*n mean = self.mean[:,sample_idx] # x is in shape of batch*n or", "-1)) edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d + 1]", "= np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None,", "2 * self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d]", "split_idx, batch_size2, sigma=1): # pred is a list (batches) of list (time seq)", "for l in range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l],", "len(real_space) self.m = sample_sizes self.level = level assert len(sample_sizes) == level assert len(mesh_size)", "edge_attr.append((y/n_y, x/n_x, a2, a1)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1)", "+ self.eps # n mean = self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape): std", "self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long),", "(xj, yj) not NearestNeighbor if abs(x)>=2: # if their parents are NN if", "= [] self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out =", "def __init__(self, x, 
eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std = torch.std(x) self.eps", "class RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1)", "index0 < index_end: idx_all = self.perm[index0: index_end] else: idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]),", "class RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d = len(real_space)", "0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges,", ":].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub", "assert self.n_layers >= 1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1]))", "theta, edge_features=1): data = [] for i in range(self.l): perm = torch.randperm(self.n) perm", "self.std + self.eps # n mean = self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape):", "= self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, Gaussian", "= torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx],", "X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd", "+ self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2)", "edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then compute the interactive neighbors --", "= 
torch.std(x, 0) self.eps = eps def encode(self, x): x = (x -", "<= r)) self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >=", "edge_attr_21[:, 2 * self.d + 1] = theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 *", "l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) +", "a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X = torch.tensor(grid, dtype=torch.float) # Exact =", "return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self): self.mean", "edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22", "# batch*n mean = self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps", "= np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx = [] self.idx_all = None self.grid_sample", "X_diff, Y_diff def torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = self.pairwise_difference(grid,", "= self.data[field] if not self.old_mat: x = x[()] x = np.transpose(x, axes=range(len(x.shape) -", "Y2 = np.tile(y2.reshape(1, n), [n, 1]) Y_diff = Y1 - Y2 return X_diff,", "torch.from_numpy(x) if self.to_cuda: x = x.cuda() return x def set_cuda(self, to_cuda): self.to_cuda =", "np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for", "-- their parents are NN but they are not NearestNeighbor edge_index_inter = []", "torch.tensor(self.edge_index_12, dtype=torch.long), \\ torch.tensor(self.edge_index_21, dtype=torch.long), \\ torch.tensor(self.edge_index_22, 
dtype=torch.long) def attributes(self, theta=None): if theta", "cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object): def", "sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD", "perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i", "* self.d + 2*self.attr_features)) edge_attr[:, 0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2", "dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return", "index_split = index_sub.reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference,", "+ 2) % n_x edge_index.append((i, i2)) edge_index.append((i2, i )) if a != None:", "= grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid,", "= downsample(params, n_x, (2 ** l)) if grid == 'grid': X, edge_index_inner, edge_attr_inner", "self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T def ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid)", "* self.l // batch_size2 out = torch.zeros(self.n, ) if cuda: out = out.cuda()", "= edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n = grid1.shape[0]", "self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx] return", 
"x_j = x_i + x if is_periodic: x_j = x_j % s_l #", "+ x if (x != n_x - 1): edge_index.append((i, i + 1)) edge_attr.append((1,", "l in range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d))", "if self.m >= n_sub: m = self.m - n_sub perm = torch.randperm(self.n) idx", "* self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1] = theta[self.edge_index_boundary[1]] else: xy", "boundary2 = np.array(range(n - s, n)) boundary3 = np.array(range(s, n, s)) boundary4 =", "= theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12,", "= radius assert self.n % self.m == 0 self.num = self.n // self.m", "l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index =", "real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s = mesh_size[0] assert len(mesh_size) ==", "x = x.view(s[0], -1) x = (x - self.b)/self.a x = x.view(s) return", "(x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i )) i2 = (x + 2) % n_x", "resolution**2 self.m = m self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n),", "n_y, grid, params): edge_index_global = [] edge_attr_global = [] X_global = [] num_nodes", ":].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) n_sub =", "reduction self.size_average = size_average def abs(self, x, y): num_examples = x.size()[0] #Assume uniform", "assert len(radius_inter) == self.level - 1 self.edge_index = [] self.edge_index_down = [] self.edge_index_up", "torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all = idx_all self.grid_sample_all = 
self.grid[self.idx_all] return self.idx, self.idx_all", "self.rel(x, y) # A simple feedforward neural network class DenseNet(torch.nn.Module): def __init__(self, layers,", "TorusGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__()", "torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def multi_grid(depth, n_x, n_y, grid, params): edge_index_global", "3 * self.d)) edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 *", "neighbors -- their parents are NN but they are not NearestNeighbor edge_index_inter =", "index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different from numpy index2 = index2.repeat(2,", "= grid grid4[:, 0] = grid[:, 0] + 1 grid4[:, 1] = grid[:,", "= index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self):", "def grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x)", "= sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long),", "np.where(pwd <= self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index]", "edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index", "self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges =", "x = self.data[field] if not self.old_mat: x = x[()] x = np.transpose(x, axes=range(len(x.shape)", "edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l in 
range(self.level): edge_index_range[l, 0]", "= edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda def", "edge_index.append((i + 1, i)) edge_attr.append((-1, 0, 0)) if (y != n_y - 1):", "__call__(self, x, y): return self.rel(x, y) # A simple feedforward neural network class", "index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index = 0", "range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all]", "0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d : 2 *", "and p > 0 self.d = d self.p = p self.reduction = reduction", "self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self,", ":] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index", "- X2 Y1 = np.tile(y1.reshape(n, 1), [1, n]) Y2 = np.tile(y2.reshape(1, n), [n,", "grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]] edge_attr[:, 3] = theta[edge_index[1]] return torch.tensor(edge_attr, dtype=torch.float)", "self.n_edges_inter.append(edge_index.shape[1]) index = index + 
self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out,", "self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self,", "if theta is None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:],", "= torch.mean(x, 0) self.std = torch.std(x, 0) self.eps = eps def encode(self, x):", "real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_sizes self.level", "= 0).repeat(2, axis = 1) index2 = torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes", "and assemble class RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d", "a list of data data = [] index = 0 for i in", "= [] edge_attr = [] for y in range(n_y): for x in range(n_x):", "def splitter(self, radius_inner, radius_inter, theta_a, theta_all): # give a test mesh, generate a", "= x.cuda() return x def set_cuda(self, to_cuda): self.to_cuda = to_cuda def set_torch(self, to_torch):", "- self.mean) / (self.std + self.eps) return x def decode(self, x, sample_idx=None): if", "= self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d", "self.splits if is_cuda: pred = torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n, ) for", "theta is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] =", "= np.linspace(0.0, 1.0, s_l) grid_l = xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l)", "split and assemble 
with downsample class DownsampleGridSplitter(object): def __init__(self, grid, resolution, r, m=100,", "return X, edge_index, edge_attr def grid_edge_aug_full(n_x, n_y, r, a): n = n_x *", "= np.linspace(0.0, 1.0, n_x) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index", "1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3", "X_difference, Y_difference def get_data(self, theta, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution,", "::l] data = data.reshape(-1, (grid_size // l) ** 2) return data def simple_grid(n_x,", "= data.reshape(-1, (grid_size // l) ** 2) return data def simple_grid(n_x, n_y): xs", "torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 =", "= torch.cat((self.perm[index: ],self.perm[: index_end]), dim=0) self.idx.append(idx) self.grid_sample.append(self.grid[idx]) index = index_end if index0 <", "2 * self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d", "y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1)", "self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2", "perm = torch.randperm(self.n) perm = perm.reshape(self.num, self.m) for j in range(self.num): idx =", "= np.array(range(n - s, n)) boundary3 = np.array(range(s, n, s)) boundary4 = np.array(range(2", "edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x = torch.cat([grid_all,", "is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: 
edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1))", "+ self.eps) return x def decode(self, x, sample_idx=None): x = (x * (self.std", "self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float)", "a != None: a1 = a[x, y] a2 = a[x, y+1] edge_attr.append((x/n_x, y/n_y,", "self.std = self.std.cpu() # normalization, scaling by range class RangeNormalizer(object): def __init__(self, x,", "edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def sampleT(self,", "= edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X # if (is_high): #", "= out_list[i].reshape(-1) return pred # generate graph, with split and assemble with downsample", "not NearestNeighbor edge_index_inter = [] for x_i in range(s_l): for x in range(-3,4):", "self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d +", "= np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2) edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index", "x.cuda() return x def set_cuda(self, to_cuda): self.to_cuda = to_cuda def set_torch(self, to_torch): self.to_torch", "theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:])", "splitter(self, radius_inner, radius_inter, theta_a, theta_all): # give a test mesh, generate a list", "self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample, 
dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float),", "< self.n: self.splits = self.splits + 1 print('n:',self.n,' m:',self.m, ' number of splits:',", "== len(split_idx) assert len(pred[0]) == self.T assert len(pred) == self.r**2 // batch_size2 out", "= index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference =", "dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the finest level, we construct the nearest neighbors", "the finest level, we construct the nearest neighbors (NN) if l==1: edge_index_nn =", "self.mean.cpu() self.std = self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object): def __init__(self, x, eps=0.00001):", "= [] edge_attr = [] for x in range(n_x): i = x i1", "sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.attr_features = attr_features", "dtype=torch.long) def attributes(self, theta=None): if theta is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12", "nx = self.s-1 if y==0: ny = self.s else: ny = self.s-1 else:", "edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def", "Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1,", "index2 = torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1", "vertice2 = np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return", "a=None): if a != None: a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0,", "= self.m - n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample = 
self.grid.reshape(self.n,", "# construct edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1 = index1 + num_nodes num_nodes", "torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for", "return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x, y): return self.rel(x,", "Gaussian class GaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std", "i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in", "edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta,", "1, i )) if a != None: a1 = a[x, y] a2 =", "+ 2*self.attr_features)) edge_attr[:, 0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d", "self.d = d self.p = p self.reduction = reduction self.size_average = size_average def", "self.d = len(real_space) self.m = sample_size self.m_i = induced_point assert len(mesh_size) == self.d", "return x def decode(self, x, sample_idx=None): x = (x * (self.std + self.eps))", "+ num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1],", ") edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:,", "self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids = []", "- low)/(mymax - mymin) self.b = -self.a*mymax + high def encode(self, x): s", "= torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 
4+self.edge_features*2)) a = theta_split[:,", "grid2) grid3 = grid grid3[:, :] = grid[:, :] + 1 pwd3 =", "- l) * 2)], dim=1) # else: # X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1)", "return X_diff, Y_diff def torus_connectivity(self, grid): pwd0 = sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 =", "= self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d", "1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]]", "- 1) s_l = s // r_l n_l = s_l print('level',s_l,r_l,n_l) xs =", "instead of randomly sample sub-grids, here we downsample sub-grids self.grid = grid.reshape(resolution, resolution,2)", "- 1): d = 1 / n_y edge_index.append((i, i + n_x)) edge_index.append((i +", "= perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def", "for x in range(self.r): for y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub", "self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.idx_i = self.idx self.idx_both", "0.01) ** 2))) edge_index.append((i + 1, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1", "self.m == 0 self.num = self.n // self.m # number of sub-grid def", "self.idx_all = idx_all self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out =", "[] edge_attr = [] for x in range(n_x): i = x i1 =", "return data def assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx) assert", "- y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if self.size_average:", "self.resolution) x = torch.randint(0, self.r, (1,)) y = torch.randint(0, self.r, (1,)) grid_sub =", "2 * 
self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1] = theta[self.edge_index[1]] else:", "dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level", "could be in shape of ntrain*n or ntrain*T*n or ntrain*n*T self.mean = torch.mean(x,", "grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j]", "nx = self.s else: nx = self.s-1 if y==0: ny = self.s else:", "edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index =", "self.n_edges_inter = [] edge_index_out = [] edge_index_down_out = [] edge_index_up_out = [] index", "is None): self.perm = torch.randperm(self.n) index = index0 for l in range(self.level): index", "np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel() for xx", "1) index2 = torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes index2 = torch.tensor(index2, dtype=torch.long)", "N, is_periodic=False): grid_list = [] theta_list = [] edge_index_list = [] edge_index_list_cuda =", "number of splits:', self.splits ) self.perm = None self.idx = [] self.idx_all =", "= self.grid[self.idx] return self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r, is_forward=False):", "d = 1 / n_x edge_index.append((i, i + 1)) edge_index.append((i + 1, i", "== self.num * self.l // batch_size2 out = torch.zeros(self.n, ) if cuda: out", "theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the finest level, we", "set_torch(self, to_torch): self.to_torch = to_torch def set_float(self, to_float): self.to_float = to_float # normalization,", "attributes_boundary(self, 
f=None, theta=None): # if self.edge_index_boundary == None: # self.boundary_connectivity2d() if f is", "in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1]", "+ self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range,", "4 + self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx =", "out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate graph on Torus, with split", "generate multi-level graph class RandomMultiMeshGenerator(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__()", "if a != None: a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x)", "1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2),", "dtype=torch.long), \\ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None): if theta is None: edge_attr =", "edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list))", "= torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out,", "ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,)", ") edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3", "dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], 
dim=0).reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) else:", "different from numpy index2 = index2.repeat(2, axis = 0).repeat(2, axis = 1) index2", "if len(self.mean.shape) > len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps # T*batch*n mean = self.mean[:,sample_idx]", "n_l = h_x_l * h_y_l a = downsample(params, n_x, (2 ** l)) if", "num_nodes) def multi_pole_grid1d(theta, theta_d, s, N, is_periodic=False): grid_list = [] theta_list = []", "self.grid_sample_all = None self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.edge_attr", "std = self.std[:,sample_idx]+ self.eps # T*batch*n mean = self.mean[:,sample_idx] # x is in", "self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 *", "sample(self, new_sample=True, index0=0): self.idx = [] self.grid_sample = [] if (new_sample) or (self.perm", "= Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >=", "out[t, x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out", "boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1]", "edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self,", "params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution,", "edge_attr.shape) return data def sample(self, theta, Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution,", "out = 
gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) #", "2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l", "self.edge_attr_down = [] self.edge_attr_up = [] if theta is None: for l in", "= theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 *", "i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in", "torch.tensor(edge_index, dtype=torch.long) if edge_features == 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else: edge_attr =", "1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge1d(n_x, a=None): if", "self.n).reshape((self.n, 1)) else: self.n = 1 grids = [] for j in range(self.d):", "y): num_examples = x.size()[0] #Assume uniform mesh h = 1.0 / (x.size()[1] -", "= sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF", "self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[1]] edge_attr_12 =", "for the finest level, we construct the nearest neighbors (NN) if l==1: edge_index_nn", "= torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr =", "= torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape,", "= self.mean[:,sample_idx] # x is in shape of batch*n or T*batch*n x =", "= x.size()[0] 
#Assume uniform mesh h = 1.0 / (x.size()[1] - 1.0) all_norms", "n_y, r, a): n = n_x * n_y xs = np.linspace(0.0, 1.0, n_x)", "a1 = a[x] a2 = a[x + 1] edge_attr.append((x / n_x, a1, a2))", "sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd", "self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :] = edge_index[1, :]", "edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y): theta_d = theta.shape[1] theta =", "-1) x = self.a*x + self.b x = x.view(s) return x def decode(self,", "pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1 = grid1[:,0] y1 = grid1[:,1] x2 =", "= grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2)", "real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T def", "= [] self.edge_attr_down = [] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter =", "self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d + 1]", "assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for t in", "self.r, (1,)) y = torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2)", "= to_float # normalization, pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer,", "mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size, l): data =", "grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd =", "y::self.r, :].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) 
n_sub", "* self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d + self.attr_features:", "edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1]", "simple feedforward neural network class DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet,", "def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both", "len(pred) == len(split_idx) assert len(pred) == self.num * self.l // batch_size2 out =", "theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(-1,) index_split", "dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr,", "else: theta = theta[self.idx] edge_attr = np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features)) edge_attr[:,", "theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3", "theta, Y, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y =", "edge_index.shape, edge_attr.shape, index_split.shape) return data def sampleT(self, theta, Y, params=None): theta_d = theta.shape[1]", "- 1, -1, -1)) if self.to_float: x = x.astype(np.float32) if self.to_torch: x =", "= self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range,", "n_l # #construct inter-graph edge if l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2)", "if cuda: out = out.cuda() for i in range(len(pred)): pred_i = 
pred[i].reshape(batch_size2, self.m)", "X1 - X2 Y1 = np.tile(y1.reshape(n, 1), [1, n]) Y2 = np.tile(y2.reshape(1, n),", "= torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def", "edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d + 1] =", "super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a = (high", "if theta is None: for l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr))", "np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) if (y !=", "edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif grid == 'grid_edge': X, edge_index_inner, edge_attr_inner =", "for i in range(self.l): perm = torch.randperm(self.n) perm = perm.reshape(self.num, self.m) for j", "edge_attr = np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features)) edge_attr[:, 0:2 * self.d] =", "torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d)", "RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d = len(real_space) self.ms", "= x.size() x = x.view(s[0], -1) x = (x - self.b)/self.a x =", "l in range(1, level+1): r_l = 2 ** (l - 1) s_l =", "self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level - 1):", "Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges", "out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size, l): data = data.reshape(-1,", "x = (x - self.mean) / (self.std + 
self.eps) return x def decode(self,", "theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features:", "edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up =", "edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]]", "m = self.m - n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample =", "n, s)) self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary =", "* self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[1]] edge_attr_12", "1, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2),", "x = (x - self.b)/self.a x = x.view(s) return x #loss function with", "level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d = len(real_space) self.ms = sample_sizes self.m = sample_sizes[0]", "= grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr", "X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2))", "self.p, 1) if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms", "for xx in np.meshgrid(*grids)]).T self.idx = [] self.idx_all = None self.grid_sample = []", "= self.perm[index0: index_end] else: idx_all = 
torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all = idx_all", "x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range,", "grid3 = grid grid3[:, :] = grid[:, :] + 1 pwd3 = sklearn.metrics.pairwise_distances(grid,", "= [] self.n_edges_inter = [] def sample(self, new_sample=True, index0=0): self.idx = [] self.grid_sample", "seq) assert len(pred) == len(split_idx) assert len(pred[0]) == self.T assert len(pred) == self.r**2", "dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge1d(n_x, a=None):", "self.to_cuda: x = x.cuda() return x def set_cuda(self, to_cuda): self.to_cuda = to_cuda def", "index0=0): self.idx = [] self.grid_sample = [] if (new_sample) or (self.perm is None):", "self.m) for j in range(self.num): idx = perm[j,:].reshape(-1,) grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample =", "= a[x, y] a2 = a[x + 1, y] edge_index.append((i, i + 1))", "= 1 / n_y a1 = a[x, y] a2 = a[x, y+1] edge_index.append((i,", "= index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out", "self.grid = grid self.resolution = resolution self.n = resolution**d self.d = d self.m", "(y != n_y - 1): d = 1 / n_y edge_index.append((i, i +", "dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2, sigma=1): # pred is a", "!= n_x - 1): edge_index.append((i, i + 1)) edge_attr.append((1, 0, 0)) edge_index.append((i +", "assemble class RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshSplitter, self).__init__() self.d =", "= torch.std(x) 
self.eps = eps def encode(self, x): x = (x - self.mean)", "self.size_average = size_average def abs(self, x, y): num_examples = x.size()[0] #Assume uniform mesh", "dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug(n_x, n_y,", "theta.reshape(self.n,-1)[idx] X = torch.cat([grid_sample,theta_sample],dim=1) pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges", "dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a, theta_all): # give", "edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self, out_list, sample_idx_list,", "+ 1) else: self.s = int(resolution/r) self.r = r self.n = resolution**2 self.m", "+ 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3 =", "d = 1 / n_y a1 = a[x, y] a2 = a[x, y+1]", "= theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2 * self.d] =", ").cuda() else: pred = torch.zeros(self.n, ) for i in range(self.splits): pred[sample_idx_list[i]] = out_list[i].reshape(-1)", "theta, Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution,", "y) # A simple feedforward neural network class DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity,", "[n, 1]) X_diff = X1 - X2 Y1 = np.tile(y1.reshape(n, 1), [1, n])", "grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2", "len(pred) == len(split_idx) assert len(pred[0]) == self.T assert len(pred) == self.r**2 // batch_size2", "d 
= 1 / n_x a1 = a[x, y] a2 = a[x +", "try: self.data = scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data = h5py.File(self.file_path) self.old_mat =", "for i2 in range(n): x2 = grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1 =", "Y_diff4 = self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF", "i1)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d", "edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]]", "'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif grid == 'grid_edge': X, edge_index_inner,", "if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn)", "# normalization, Gaussian class GaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean =", "else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else:", "torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None: data = Data(x=X,", "split_idx, batch_size2, sigma=1, cuda=False): assert len(pred) == len(split_idx) assert len(pred) == self.num *", "perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self,", "a != None: a1 = a[x, y] a2 = a[x + 1, y]", "= np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs,", "theta_l = theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N, n_l, theta_d) theta_l = 
torch.tensor(theta_l,", "edge_index = torch.tensor(edge_index, dtype=torch.long) if edge_features == 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1) else:", "= theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1)", "__init__(self, x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std = torch.std(x) self.eps =", "len(self.mean.shape) == len(sample_idx[0].shape): std = self.std[sample_idx] + self.eps # batch*n mean = self.mean[sample_idx]", "if self.to_float: x = x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x) if self.to_cuda: x", "(y != n_y - 1): d = 1 / n_y a1 = a[x,", "0] = n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index", "perm = torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx def get_grid(self):", "3 * self.d)) edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 *", "Y_diff3 = self.pairwise_difference(grid, grid3) grid4 = grid grid4[:, 0] = grid[:, 0] +", "y in range(n_y): for x in range(n_x): i = y * n_x +", "def set_float(self, to_float): self.to_float = to_float # normalization, pointwise gaussian class UnitGaussianNormalizer(object): def", "return out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2, sigma=1): # pred is a list", "a[x, y] a2 = a[x + 1, y] edge_attr.append((x / n_x, y /", "dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for", "* self.d +1] = theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None:", "low=0.0, high=1.0): 
super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a", "+ 1] edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x / n_x, a2, a1)) X", "r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <=", "dtype=torch.float) # # generate two-level graph class RandomTwoMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size,", "use graph network's data structure, # the edge index shall be stored as", "0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 * self.d]", "self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out =", "grid, resolution, d=2, m=200, l=1, radius=0.25): super(RandomGridSplitter, self).__init__() self.grid = grid self.resolution =", "= edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long) if edge_features == 0: edge_attr = grid_sample[edge_index.T].reshape(n_edges,", "assemble(self, pred, split_idx, batch_size2, sigma=1, cuda=False): assert len(pred) == len(split_idx) assert len(pred) ==", "= self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, dtype=torch.long), \\ torch.tensor(self.edge_index_12,", "= a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx))", "y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out =", "y2 = grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1, n]) X2 = np.tile(x2.reshape(1, n),", "for i1 in range(n): x1 = grid[i1] for i2 in range(n): x2 =", "a) # 
update index edge_index_inner = edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct", "# X = torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l, (depth - 1 -", "grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1, n]) X2 = np.tile(x2.reshape(1, n), [n, 1])", "edge index shall be stored as tensor instead of list # we concatenate", "int(((resolution - 1)/r) + 1) else: self.s = int(resolution/r) self.r = r self.n", ")[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split =", "sample_sizes self.level = level assert len(sample_sizes) == level assert len(mesh_size) == self.d if", "decode(self, x, sample_idx=None): x = (x * (self.std + self.eps)) + self.mean return", "mesh h = 1.0 / (x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1),", "range(s_l)): # if (xi, yi), (xj, yj) not NearestNeighbor if abs(x)>=2: # if", "if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.grid_sample = self.grid", "np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD,", "[] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self, new_sample=True, index0=0): self.idx =", "1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x):", "ny] out[t, x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap')", "1): d = 1 / n_y edge_index.append((i, i + n_x)) edge_index.append((i + n_x,", "for xx in np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx =", "dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug_full(n_x, n_y,", "Y_sub.shape[0] if 
self.m >= n_sub: m = self.m - n_sub perm = torch.randperm(self.n)", "= np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] edge_index = torch.tensor(edge_index, dtype=torch.long) if edge_features", "dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level assert len(radius_inter) == self.level", "1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2 * self.d]", "= (np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference =", "sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size, l): data", "data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def", "> len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps # T*batch*n mean = self.mean[:,sample_idx] # x", "dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0,", ") edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features]", "* 2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1]", "(time seq) assert len(pred) == len(split_idx) assert len(pred[0]) == self.T assert len(pred) ==", "= nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers -", "edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr =", "= np.vstack(np.where(pwd <= self.radius)) pwd_index = 
pwd_index[np.where(pwd <= self.radius)] PWD_index = (np.where(pwd <=", "they are not NearestNeighbor edge_index_inter = [] for x_i in range(s_l): for x", "y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return", "idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0) self.idx_all = idx_all self.grid_sample_all = self.grid[self.idx_all] return", "return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None): if f is None: if theta", "x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub", "self.mean) / (self.std + self.eps) return x def decode(self, x, sample_idx=None): if sample_idx", "= int(resolution/r) self.r = r self.n = resolution**2 self.m = m self.T =", "-1) edge_attr[:, 2*self.d] = a[edge_index[0]] edge_attr[:, 2*self.d+1] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float)", "torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params))", "[] self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter = []", "gaussian_filter ################################################# # # Utilities # ################################################# device = torch.device('cuda' if torch.cuda.is_available() else", "self.eps = eps def encode(self, x): x = (x - self.mean) / (self.std", "= torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self):", "return torch.tensor(self.edge_index_boundary, 
dtype=torch.long) def attributes_boundary(self, f=None, theta=None): # if self.edge_index_boundary == None: #", "= torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub", "self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if", "edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l in", "edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y],", "= y * n_x + x if (x != n_x - 1): edge_index.append((i,", "if i==0: idx, idx_all = self.sample(new_sample=True, index0=index) else: idx, idx_all = self.sample(new_sample=False, index0=index)", "dtype=torch.long), n_edges, distance, X_difference, Y_difference def get_data(self, theta, params=None): theta_d = theta.shape[1] theta", "self.m_i = induced_point assert len(mesh_size) == self.d if self.d == 1: self.n =", "def __init__(self, grid, resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead of", "= theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features] =", "'cpu') # reading data class MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader,", "torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr", "np.array(range(n - s, n)) boundary3 = np.array(range(s, n, s)) boundary4 = np.array(range(2 *", "network class 
DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers =", "torch.tensor(range(n_l), dtype=torch.long) index1 = index1 + num_nodes num_nodes += n_l # #construct inter-graph", "vertice1 = np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0)", "+ self.m) % self.n grid, grid_all = self.get_grid() edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner,", "torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split)", "pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12))", "= x_i + x # if (xj, yj) is a valid node if", "y] edge_index.append((i, i + 1)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)),", "self._load_file() def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data =", "np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges =", "theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx = perm[:m]", "= torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l in", "edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index", "self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both] edge_attr = 
np.zeros((self.n_edges,", "super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda = to_cuda self.to_float = to_float self.file_path =", "perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx]", "* self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2", "range(n_y): for x in range(n_x): i = y * n_x + x if", "edge_index.append((i, i + n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i + n_x, i)) edge_attr.append((0, -1,", "edge_index.append((i + n_x, i)) if a != None: a1 = a[x, y] a2", "edge_index.append((i, i + n_x)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d)", "index2 + num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 =", "with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr, mask_index, num_nodes) def", "_, l in enumerate(self.layers): x = l(x) return x class DenseNet_sin(torch.nn.Module): def __init__(self,", "np.array(range(self.n)) self.idx_i = self.idx self.idx_both = self.idx self.grid_sample = self.grid self.grid_sample_i = self.grid", "self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution,", "[] index = 0 for i in range(self.splits): if i==0: idx, idx_all =", "0, 0)) edge_index.append((i + 1, i)) edge_attr.append((-1, 0, 0)) if (y != n_y", "torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape)", "a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape, 
edge_index.shape,", "s, n)) boundary3 = np.array(range(s, n, s)) boundary4 = np.array(range(2 * s -", "self.d : 2 * self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 *", "dtype=torch.float) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def", "= sample_size self.m_i = induced_point assert len(mesh_size) == self.d if self.d == 1:", "dtype=torch.float) def ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r))", "torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx,", "def forward(self, x): for _, l in enumerate(self.layers): x = l(x) return x", "i2)) edge_index.append((i2, i )) if a != None: a1 = a[x] a2 =", "!= None: a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys =", "theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, )", "self._load_file() def read_field(self, field): x = self.data[field] if not self.old_mat: x = x[()]", "self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0, self.r, (1,)) y", "params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred, split_idx,", "= np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) if self.to_float: x = x.astype(np.float32) if", "torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge1d(n_x,", "= torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def 
get_edge_index_range(self): # in order to", "= split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s else: nx = self.s-1", "edge_attr = [] for y in range(n_y): for x in range(n_x): i =", "batch_size2 out = torch.zeros(self.n, ) if cuda: out = out.cuda() for i in", "std = self.std + self.eps # n mean = self.mean else: if len(self.mean.shape)", "self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf", "std) + mean return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda()", "= self.n self.idx = np.array(range(self.n)) self.idx_i = self.idx self.idx_both = self.idx self.grid_sample =", "dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:, ::r_l, :]", "/ 0.01) ** 2))) if (y != n_y - 1): d = 1", "batch*n mean = self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): std = self.std[:,sample_idx]+ self.eps #", "high def encode(self, x): s = x.size() x = x.view(s[0], -1) x =", "n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr", "nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers - 1:", "def decode(self, x): s = x.size() x = x.view(s[0], -1) x = (x", "= m self.T = T self.radius = radius self.edge_features = edge_features self.index =", "pwd = sklearn.metrics.pairwise_distances(grid_sample) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] edge_index =", "np.exp(-(d / 0.01) ** 2))) edge_index.append((i + 1, i)) edge_attr.append((d, a2, a1, 1", "self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out = [] for grid in self.grid_sample:", "1 - l) * 2)], 
dim=1) # else: # X_l = torch.tensor(l, dtype=torch.float).repeat(n_l,", "y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r,", "a[x + 1, y] edge_index.append((i, i + 1)) edge_attr.append((d, a1, a2, 1 /", "num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1)", "= torch.from_numpy(x) if self.to_cuda: x = x.cuda() return x def set_cuda(self, to_cuda): self.to_cuda", "theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N, n_l, theta_d)", "(xi, yi), (xj, yj) not NearestNeighbor if abs(x)>=2: # if their parents are", "theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s = self.s n = self.n", "theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0)", "theta_a, theta_all): # give a test mesh, generate a list of data data", "dim=0) mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create multi_grid with size:', X.shape,", "index = 0 for l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l]", "a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,", "nn from scipy.ndimage import gaussian_filter ################################################# # # Utilities # ################################################# device =", "= grid_edge(h_y_l, h_x_l, a) # update index edge_index_inner = edge_index_inner + num_nodes edge_index_global.append(edge_index_inner)", "theta_list = [] edge_index_list = [] edge_index_list_cuda = 
[] level = int(np.log2(s) -", "edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape,", "edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr,", "self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean", "dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug_full(n_x, n_y, r, a): n = n_x", "Data import torch.nn as nn from scipy.ndimage import gaussian_filter ################################################# # # Utilities", "None self.old_mat = None self._load_file() def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat =", "self.eps) return x def decode(self, x, sample_idx=None): if sample_idx is None: std =", "vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary =", ") edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr,", "dtype=torch.float), \\ torch.tensor(self.grid_sample_i, dtype=torch.float), \\ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12, r22): pwd", "dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None: data = Data(x=X, y=Y_split,", "self.idx = perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample =", "(grid_size // l) ** 2) return data def simple_grid(n_x, n_y): xs = np.linspace(0.0,", "self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m 
self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22))", "n_x + x if (x != n_x - 1): edge_index.append((i, i + 1))", "= sample_sizes[0] self.level = level assert len(sample_sizes) == level assert len(mesh_size) == self.d", "edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr", "self).__init__() self.to_torch = to_torch self.to_cuda = to_cuda self.to_float = to_float self.file_path = file_path", "class LpLoss(object): def __init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm", "sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3 = grid grid3[:, :] =", "= self.pairwise_difference(grid, grid3) grid4 = grid grid4[:, 0] = grid[:, 0] + 1", "sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1]", "= index2 + num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2", "gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index =", "in np.meshgrid(*grids)]).T self.idx = [] self.idx_all = None self.grid_sample = [] self.grid_sample_all =", "- 1: x = torch.sin(x) return x # generate graphs on square domain", "= l self.radius = radius assert self.n % self.m == 0 self.num =", "# generate multi-level graph, with split and assemble class RandomMultiMeshSplitter(object): def __init__(self, real_space,", "pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return 
torch.tensor(self.edge_index,", "dtype=torch.float) def ball_connectivity(self, r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i)", "= torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges,", "idx + nx * ny] out[t, x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out", "/ 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2,", "self.edge_attr_up = [] if theta is None: for l in range(self.level): edge_attr =", "= [] for x_i in range(s_l): for x in (-1,1): x_j = x_i", "downsample sub-grids self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y", "if theta is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:],", "x if is_periodic: x_j = x_j % s_l # if (xj, yj) is", "edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 + self.edge_features", "pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12", "n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index = 0", "# torch.repeat is different from numpy index2 = index2.repeat(2, axis = 0).repeat(2, axis", "edge_index.append((i, i + n_x)) edge_index.append((i + n_x, i)) if a != None: a1", "= torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate graph on Torus, with split and", "yj) is a valid node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn,", 
"dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0)", "len(pred) == len(split_idx) assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for", "= [] self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out = [] edge_index_down_out =", "out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate multi-level graph, with split and", "the nearest neighbors (NN) if l==1: edge_index_nn = [] for x_i in range(s_l):", "- 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def", "super(RandomMultiMeshSplitter, self).__init__() self.d = len(real_space) self.ms = sample_sizes self.m = sample_sizes[0] self.level =", "np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2,", "f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) #", "h5py.File(self.file_path) self.old_mat = False def load_file(self, file_path): self.file_path = file_path self._load_file() def read_field(self,", "X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference,", "generate multi-level graph, with split and assemble class RandomMultiMeshSplitter(object): def __init__(self, real_space, mesh_size,", "theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22,", "give a test mesh, generate a list of data 
data = [] index", "# generate graph, with split and assemble with downsample class DownsampleGridSplitter(object): def __init__(self,", "attributes(self, theta=None): if theta is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12,", "- n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample", "data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape,", "edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:, 0:2 * self.d] =", "a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test',", "index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split =", "np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.idx_i =", "= np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:,", "= torch.mean(x) self.std = torch.std(x) self.eps = eps def encode(self, x): x =", "self.to_torch: x = torch.from_numpy(x) if self.to_cuda: x = x.cuda() return x def set_cuda(self,", "= theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1] = theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1))", "self.idx self.grid_sample = self.grid self.grid_sample_i = self.grid self.grid_sample_both = self.grid def sample(self): perm", 
"DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers) -", "ys)]).T edge_index = [] edge_attr = [] for y in range(n_y): for x", "a[x + 1, y] edge_attr.append((x / n_x, y / n_y, a1, a2)) edge_attr.append((y/n_y,", "grid, resolution, r, m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution,", "torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x,", "#construct inter-graph edge if l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat", "in np.meshgrid(*grids)]).T def ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r))", "theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub", "in range(self.l): perm = torch.randperm(self.n) perm = perm.reshape(self.num, self.m) for j in range(self.num):", "super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.m_i = induced_point assert len(mesh_size)", "to_cuda): self.to_cuda = to_cuda def set_torch(self, to_torch): self.to_torch = to_torch def set_float(self, to_float):", "= self.std + self.eps # n mean = self.mean else: if len(self.mean.shape) ==", "edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] edge_index_out =", "dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create", "+ index edge_index[1, :] = 
edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:])", "n_y xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) grid =", "mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids =", "self.m = self.n self.idx = np.array(range(self.n)) self.grid_sample = self.grid def sample(self): perm =", "__init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm type are postive", "else: nx = self.s ny = self.s # pred_ij = pred_i[idx : idx", "Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features:", "torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,)", "l) h_y_l = n_y // (2 ** l) n_l = h_x_l * h_y_l", "= [] edge_index_list = [] edge_index_list_cuda = [] level = int(np.log2(s) - 1)", "(high - low)/(mymax - mymin) self.b = -self.a*mymax + high def encode(self, x):", "(x * std) + mean return x def cuda(self): self.mean = self.mean.cuda() self.std", "pred_i[idx : idx + nx * ny] out[t, x::self.r, y::self.r] = pred_ij[:nx *", "eps def encode(self, x): x = (x - self.mean) / (self.std + self.eps)", "np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None,", "on square domain class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d =", "len(real_space) self.m = sample_size self.m_i = induced_point assert len(mesh_size) == self.d if self.d", "index_split = torch.cat([index_sub, 
idx], dim=0).reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split", "for x in range(-3,4): x_j = x_i + x # if (xj, yj)", "= to_cuda self.to_float = to_float self.file_path = file_path self.data = None self.old_mat =", "(index+self.ms[l]) % self.n if index < index_end: idx = self.perm[index: index_end] else: idx", "is a valid node if is_periodic: x_j = x_j % s_l if (x_j", "self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub", "PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference", "/ np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d", "x_j = x_j % s_l # if (xj, yj) is a valid node", "self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <=", "X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr", "else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s", "# generate graphs on square domain class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): super(SquareMeshGenerator,", "y] a2 = a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1))", "# the edge index shall be stored as tensor instead of list #", "= theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1] = theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1))", "= grid self.resolution = resolution self.n = resolution**d self.d = d self.m =" ]
[ "print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000)) i = 0 for n", "time.sleep(1) if self.count !=0: print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000)) i", "#!/usr/bin/env python3 import rospy import time from sensor_msgs.msg import Imu class SubTest(): def", "= rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum += diff self.count +=1 self.max =", "self.sub = rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f = open(\"latencies\", 'w') while not rospy.is_shutdown():", "self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum += diff self.count +=1 self.max = max(self.max, diff) if __name__", "self.arrt = [] self.arrn = [] self.sum =0 self.count=0 self.max = 0 self.sub", "rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum += diff self.count +=1 self.max = max(self.max,", "+ str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000)) i = 0 for n in self.arrn:", "sensor_msgs.msg import Imu class SubTest(): def __init__(self): rospy.init_node(\"test_sub\") self.arrt = [] self.arrn =", "= rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f = open(\"latencies\", 'w') while not rospy.is_shutdown(): time.sleep(1)", "self.arrn: self.f.write(str(n) + \",\" + str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close() def cb(self, msg:Imu):", "!=0: print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000)) i = 0 for", "= [] self.arrn = [] self.sum =0 self.count=0 self.max = 0 self.sub =", "self.max = 0 self.sub = rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f = open(\"latencies\", 'w')", "for n in self.arrn: self.f.write(str(n) + \",\" + str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close()", "i = 0 for n in self.arrn: self.f.write(str(n) + \",\" + str(self.arrt[i]*1000) +", "[] self.sum =0 self.count=0 
self.max = 0 self.sub = rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1)", "+ \"\\n\") i+=1 self.f.close() def cb(self, msg:Imu): diff = rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff)", "if self.count !=0: print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000)) i =", "self.f.close() def cb(self, msg:Imu): diff = rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum +=", "time from sensor_msgs.msg import Imu class SubTest(): def __init__(self): rospy.init_node(\"test_sub\") self.arrt = []", "\",\" + str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close() def cb(self, msg:Imu): diff = rospy.get_time()", "0 for n in self.arrn: self.f.write(str(n) + \",\" + str(self.arrt[i]*1000) + \"\\n\") i+=1", "\" + str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000)) i = 0 for n in", "rospy.is_shutdown(): time.sleep(1) if self.count !=0: print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000))", "SubTest(): def __init__(self): rospy.init_node(\"test_sub\") self.arrt = [] self.arrn = [] self.sum =0 self.count=0", "rospy import time from sensor_msgs.msg import Imu class SubTest(): def __init__(self): rospy.init_node(\"test_sub\") self.arrt", "0 self.sub = rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f = open(\"latencies\", 'w') while not", "n in self.arrn: self.f.write(str(n) + \",\" + str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close() def", "rospy.init_node(\"test_sub\") self.arrt = [] self.arrn = [] self.sum =0 self.count=0 self.max = 0", "def cb(self, msg:Imu): diff = rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum += diff", "msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum += diff self.count +=1 self.max = max(self.max, diff) if", "import Imu class SubTest(): def __init__(self): 
rospy.init_node(\"test_sub\") self.arrt = [] self.arrn = []", "self.sum =0 self.count=0 self.max = 0 self.sub = rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f", "from sensor_msgs.msg import Imu class SubTest(): def __init__(self): rospy.init_node(\"test_sub\") self.arrt = [] self.arrn", "self.arrn.append(msg.header.seq) self.sum += diff self.count +=1 self.max = max(self.max, diff) if __name__ ==", "i+=1 self.f.close() def cb(self, msg:Imu): diff = rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum", "self.f = open(\"latencies\", 'w') while not rospy.is_shutdown(): time.sleep(1) if self.count !=0: print(\"mean: \"", "str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close() def cb(self, msg:Imu): diff = rospy.get_time() - msg.header.stamp.to_sec()", "import time from sensor_msgs.msg import Imu class SubTest(): def __init__(self): rospy.init_node(\"test_sub\") self.arrt =", "in self.arrn: self.f.write(str(n) + \",\" + str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close() def cb(self,", "[] self.arrn = [] self.sum =0 self.count=0 self.max = 0 self.sub = rospy.Subscriber(\"test\",", "self.cb, queue_size=1) self.f = open(\"latencies\", 'w') while not rospy.is_shutdown(): time.sleep(1) if self.count !=0:", "self.f.write(str(n) + \",\" + str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close() def cb(self, msg:Imu): diff", "python3 import rospy import time from sensor_msgs.msg import Imu class SubTest(): def __init__(self):", "'w') while not rospy.is_shutdown(): time.sleep(1) if self.count !=0: print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max:", "=0 self.count=0 self.max = 0 self.sub = rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f =", "msg:Imu): diff = rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum += diff self.count +=1", "Imu class SubTest(): def __init__(self): rospy.init_node(\"test_sub\") self.arrt = [] 
self.arrn = [] self.sum", "self.count !=0: print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000)) i = 0", "queue_size=1) self.f = open(\"latencies\", 'w') while not rospy.is_shutdown(): time.sleep(1) if self.count !=0: print(\"mean:", "class SubTest(): def __init__(self): rospy.init_node(\"test_sub\") self.arrt = [] self.arrn = [] self.sum =0", "<reponame>MosHumanoid/bitbots_thmos_meta #!/usr/bin/env python3 import rospy import time from sensor_msgs.msg import Imu class SubTest():", "self.arrn = [] self.sum =0 self.count=0 self.max = 0 self.sub = rospy.Subscriber(\"test\", Imu,", "= [] self.sum =0 self.count=0 self.max = 0 self.sub = rospy.Subscriber(\"test\", Imu, self.cb,", "diff = rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum += diff self.count +=1 self.max", "rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f = open(\"latencies\", 'w') while not rospy.is_shutdown(): time.sleep(1) if", "+ str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close() def cb(self, msg:Imu): diff = rospy.get_time() -", "\" + str(self.max*1000)) i = 0 for n in self.arrn: self.f.write(str(n) + \",\"", "+ \",\" + str(self.arrt[i]*1000) + \"\\n\") i+=1 self.f.close() def cb(self, msg:Imu): diff =", "+= diff self.count +=1 self.max = max(self.max, diff) if __name__ == \"__main__\": SubTest()", "def __init__(self): rospy.init_node(\"test_sub\") self.arrt = [] self.arrn = [] self.sum =0 self.count=0 self.max", "= open(\"latencies\", 'w') while not rospy.is_shutdown(): time.sleep(1) if self.count !=0: print(\"mean: \" +", "__init__(self): rospy.init_node(\"test_sub\") self.arrt = [] self.arrn = [] self.sum =0 self.count=0 self.max =", "str(self.max*1000)) i = 0 for n in self.arrn: self.f.write(str(n) + \",\" + str(self.arrt[i]*1000)", "print(\"max: \" + str(self.max*1000)) i = 0 for n in self.arrn: self.f.write(str(n) +", "- msg.header.stamp.to_sec() self.arrt.append(diff) 
self.arrn.append(msg.header.seq) self.sum += diff self.count +=1 self.max = max(self.max, diff)", "self.count=0 self.max = 0 self.sub = rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f = open(\"latencies\",", "not rospy.is_shutdown(): time.sleep(1) if self.count !=0: print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max: \" +", "open(\"latencies\", 'w') while not rospy.is_shutdown(): time.sleep(1) if self.count !=0: print(\"mean: \" + str((self.sum/self.count)*1000))", "cb(self, msg:Imu): diff = rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq) self.sum += diff self.count", "str((self.sum/self.count)*1000)) print(\"max: \" + str(self.max*1000)) i = 0 for n in self.arrn: self.f.write(str(n)", "= 0 for n in self.arrn: self.f.write(str(n) + \",\" + str(self.arrt[i]*1000) + \"\\n\")", "while not rospy.is_shutdown(): time.sleep(1) if self.count !=0: print(\"mean: \" + str((self.sum/self.count)*1000)) print(\"max: \"", "import rospy import time from sensor_msgs.msg import Imu class SubTest(): def __init__(self): rospy.init_node(\"test_sub\")", "Imu, self.cb, queue_size=1) self.f = open(\"latencies\", 'w') while not rospy.is_shutdown(): time.sleep(1) if self.count", "+ str(self.max*1000)) i = 0 for n in self.arrn: self.f.write(str(n) + \",\" +", "self.sum += diff self.count +=1 self.max = max(self.max, diff) if __name__ == \"__main__\":", "\"\\n\") i+=1 self.f.close() def cb(self, msg:Imu): diff = rospy.get_time() - msg.header.stamp.to_sec() self.arrt.append(diff) self.arrn.append(msg.header.seq)", "= 0 self.sub = rospy.Subscriber(\"test\", Imu, self.cb, queue_size=1) self.f = open(\"latencies\", 'w') while" ]
[ "print \"Hello World\" print \"Print something else\" for i in range (10): print", "\"Hello World\" print \"Print something else\" for i in range (10): print i" ]
[ "[1, \"rgb(255, 255, 217)\"], ], \"Greens\": [ [0, \"rgb(0, 68, 27)\"], [0.125, \"rgb(0,", "\"#ff7f0e\", # safety orange \"#2ca02c\", # cooked asparagus green \"#d62728\", # brick red", "], } def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale =", "2.0 \"viridis\": [ (0.0, (68, 1, 84, 255)), (0.25, (58, 82, 139, 255)),", "in enumerate(scale) ] colorscales[name + \"_reversed\"] = reversed_scale # Generate also all scales", "[1, \"#fde725\"], ], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = { \"thermal\": [ (0.3333,", "255)), (1.0, (255, 255, 255, 255))], # Perceptually uniform sequential colormaps from Matplotlib", "in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for scale_name, scale in Gradients.items(): colorscales[scale_name] = scale", "} # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = { \"thermal\": [ (0.3333, (185, 0, 0,", "and colorscales, taken from plotly color_cycle = [ \"#1f77b4\", # muted blue \"#ff7f0e\",", "245, 224)\"], [1, \"rgb(247, 252, 245)\"], ], \"YlOrRd\": [ [0, \"rgb(128, 0, 38)\"],", "[0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\": [ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"],", "[0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\": [ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"],", "[0.75, \"rgb(199, 233, 192)\"], [0.875, \"rgb(229, 245, 224)\"], [1, \"rgb(247, 252, 245)\"], ],", "\"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 200)\"], [0.25, \"rgb(0, 25, 255)\"], [0.375, \"rgb(0, 152, 255)\"],", "\"rgb(178, 10, 28)\"], ], # Scale for non-negative numeric values \"Reds\": [ [0,", "0, 255)), # ], \"grey\": [(0.0, (0, 0, 0, 255)), (1.0, (255, 255,", "colorscales to 
pyqtgraph tuples \"\"\" if c[0] == \"#\" and len(c) == 7:", "scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0, clip_color) if scale[-2][0] > 1 - clip_percent: scale_high[-2]", "# (0.99, (255, 255, 255, 255)), # (1.0, (255, 0, 0, 255)), #", "(0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247, (115, 15,", "] colorscales_raw = { \"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8,", "(203, 71, 119, 255)), (0.75, (248, 149, 64, 255)), (1.0, (239, 248, 33,", "109, 44)\"], [0.25, \"rgb(35, 139, 69)\"], [0.375, \"rgb(65, 171, 93)\"], [0.5, \"rgb(116, 196,", "[0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\": [ [0, \"rgb(0,0,0)\"],", "clip_percent: scale_low[1] = ((scale[1][0] + scale[0][0]) / 2, scale_low[1][1]) else: scale_low[1] = (clip_percent,", "scale in list(colorscales.items()): clip_percent = 0.03 clip_color = (0, 255, 0, 255) scale_low", "anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale = [ [i / (len_colorlist", "\"rgb(247, 252, 245)\"], ], \"YlOrRd\": [ [0, \"rgb(128, 0, 38)\"], [0.125, \"rgb(189, 0,", "[1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8, 29, 88)\"], [0.125, \"rgb(37, 52, 148)\"], [0.25,", "reversed_scale # Generate also all scales with cliping at green for name, scale", "to add new colors See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below to get you started.", "\"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"],", "for non-negative numeric values \"Reds\": [ [0, \"rgb(220, 220, 220)\"], [0.2, \"rgb(245, 195,", "(253, 231, 36, 255)), ], \"inferno\": [ (0.0, (0, 0, 3, 255)), (0.25,", "(252, 254, 164, 255)), ], \"plasma\": [ (0.0, (12, 
7, 134, 255)), (0.25,", "\"RdBu\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6,", "values \"Blues\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(40, 60, 190)\"], [0.5, \"rgb(70,", "only supports rgb(r,g,b) and #rrggbb colors\") colorscales = {} for scale_name, scale in", "], \"earth\": [ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"],", "\"rgb(255,255,255)\"], ], \"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1,", "255)), (0.5, (236, 0, 134, 255)), (0.8, (246, 246, 0, 255)), (1.0, (255,", "[0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"],", "\"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\":", "return tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba only supports rgb(r,g,b) and #rrggbb colors\")", "248, 217)\"], [1, \"rgb(255, 255, 217)\"], ], \"Greens\": [ [0, \"rgb(0, 68, 27)\"],", "\"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"],", "(58, 82, 139, 255)), (0.5, (32, 144, 140, 255)), (0.75, (94, 201, 97,", "/ 2, scale_low[1][1]) else: scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"] = scale_low", "[(0.0, (0, 0, 0, 255)), (1.0, (255, 255, 255, 255))], # Perceptually uniform", "allow the specification of that part... 
\"cyclic\": [ (0.0, (255, 0, 4, 255)),", "[ [i / (len_colorlist - 1), \"rgb\" + repr(tuple((int(x * 255) for x", "\"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\": [ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625,", "blue-teal ] colorscales_raw = { \"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0,", "255) scale_low = list(scale) scale_low.insert(1, scale[0]) scale_low[0] = (0.0, clip_color) if scale[1][0] <", "(clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"] = scale_low scale_high = list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1]", "\"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 200)\"], [0.25,", "in list(colorscales.items()): last_idx = len(scale) - 1 reversed_scale = [ (scale[last_idx - i][0],", "\"greyclip\": [ # (0.0, (0, 0, 0, 255)), # (0.99, (255, 255, 255,", "reversed version of all of them. Feel free to add new colors See", "\"Reds\": [ [0, \"rgb(220, 220, 220)\"], [0.2, \"rgb(245, 195, 157)\"], [0.4, \"rgb(245, 160,", "10, 28)\"], ], # Scale for non-negative numeric values \"Reds\": [ [0, \"rgb(220,", "255)), (0.0, (0, 0, 0, 255)), ], \"yellowy\": [ (0.0, (0, 0, 0,", "# (0.0, (0, 0, 0, 255)), # (0.99, (255, 255, 255, 255)), #", "of them. 
Feel free to add new colors See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below", "\"rgb(230, 145, 90)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for non-negative numeric", "], \"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 200)\"], [0.25, \"rgb(0, 25, 255)\"],", "list(scale) scale_low.insert(1, scale[0]) scale_low[0] = (0.0, clip_color) if scale[1][0] < clip_percent: scale_low[1] =", "15, 109, 255)), (0.5, (187, 55, 84, 255)), (0.75, (249, 142, 8, 255)),", "all scales with cliping at green for name, scale in list(colorscales.items()): clip_percent =", "plotly color_cycle = [ \"#1f77b4\", # muted blue \"#ff7f0e\", # safety orange \"#2ca02c\",", "0.03 clip_color = (0, 255, 0, 255) scale_low = list(scale) scale_low.insert(1, scale[0]) scale_low[0]", "make_rgba(scale) for scale_name, scale in Gradients.items(): colorscales[scale_name] = scale for name, scale in", "(0, 0, 0, 255)), (0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941, (255, 255, 0,", "(1.0, (253, 231, 36, 255)), ], \"inferno\": [ (0.0, (0, 0, 3, 255)),", "\"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"], [0.7, \"rgb(230, 145, 90)\"], [1, \"rgb(178, 10, 28)\"],", "(68, 1, 84, 255)), (0.25, (58, 82, 139, 255)), (0.5, (32, 144, 140,", "55, 84, 255)), (0.75, (249, 142, 8, 255)), (1.0, (252, 254, 164, 255)),", "\"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"],", "\"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default colors and colorscales, taken from plotly", "www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"],", "convert a single color value to (r, g, b, a) input can be", "\"thermal\": [ (0.3333, (185, 0, 0, 255)), (0.6666, (255, 220, 0, 255)), (1,", "to 
allow the specification of that part... # \"greyclip\": [ # (0.0, (0,", "in col)))] for i, col in enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45 = make_qcodes_anglemap45()", "https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = { \"thermal\": [ (0.3333, (185, 0, 0, 255)), (0.6666, (255,", "[0.625, \"rgb(127, 205, 187)\"], [0.75, \"rgb(199, 233, 180)\"], [0.875, \"rgb(237, 248, 217)\"], [1,", "chestnut brown \"#e377c2\", # raspberry yogurt pink \"#7f7f7f\", # middle gray \"#bcbd22\", #", "52, 148)\"], [0.25, \"rgb(34, 94, 168)\"], [0.375, \"rgb(29, 145, 192)\"], [0.5, \"rgb(65, 182,", "scale_name, scale in Gradients.items(): colorscales[scale_name] = scale for name, scale in list(colorscales.items()): last_idx", "[0.25, \"rgb(35, 139, 69)\"], [0.375, \"rgb(65, 171, 93)\"], [0.5, \"rgb(116, 196, 118)\"], [0.625,", "167, 255)), (0.5, (203, 71, 119, 255)), (0.75, (248, 149, 64, 255)), (1.0,", "0, 3, 255)), (0.25, (80, 18, 123, 255)), (0.5, (182, 54, 121, 255)),", "a single color value to (r, g, b, a) input can be an", "\"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\":", "[ (scale[last_idx - i][0], color[1]) for i, color in enumerate(scale) ] colorscales[name +", "\"grey\": [(0.0, (0, 0, 0, 255)), (1.0, (255, 255, 255, 255))], # Perceptually", "'#rrggbb' if we decide we want more we can make more, but for", "= (clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"] = scale_low scale_high = list(scale) scale_high.insert(-1, scale[-1])", "252, 191, 255)), ], } def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist =", "rgb string 'rgb(r,g,b)', '#rrggbb' if we decide we want more we can make", "16), 255) if c[:4] == \"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba", 
"[0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ],", "(0.5, (32, 144, 140, 255)), (0.75, (94, 201, 97, 255)), (1.0, (253, 231,", "231, 36, 255)), ], \"inferno\": [ (0.0, (0, 0, 3, 255)), (0.25, (87,", "in enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale):", "RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(106,", "colorscales = {} for scale_name, scale in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for scale_name,", "# \"greyclip\": [ # (0.0, (0, 0, 0, 255)), # (0.99, (255, 255,", "237, 160)\"], [1, \"rgb(255, 255, 204)\"], ], \"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], #", "= ((scale[-1][0] + scale[-2][0]) / 2, scale_high[-2][1]) else: scale_high[-2] = (1 - clip_percent,", "\"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\":", "[0.875, \"rgb(237, 248, 217)\"], [1, \"rgb(255, 255, 217)\"], ], \"Greens\": [ [0, \"rgb(0,", "[ (0.2, (7, 0, 220, 255)), (0.5, (236, 0, 134, 255)), (0.8, (246,", "[0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882,", "10, 172)\"], [0.35, \"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"], [0.7,", "\"rgb(255, 255, 204)\"], ], \"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified RdBu based", "0, 255)), ], \"spectrum\": [ (1.0, (255, 0, 255, 255)), (0.0, (255, 0,", "scale_low[1][1]) colorscales[name + \"_clip_low\"] = scale_low scale_high = 
list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] =", "colormaps from Matplotlib 2.0 \"viridis\": [ (0.0, (68, 1, 84, 255)), (0.25, (58,", "[0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098,", "numeric values \"Blues\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(40, 60, 190)\"], [0.5,", "default colors and colorscales, taken from plotly color_cycle = [ \"#1f77b4\", # muted", "10, 172)\"], [0.35, \"rgb(40, 60, 190)\"], [0.5, \"rgb(70, 100, 245)\"], [0.6, \"rgb(90, 120,", "255, 150)\"], [0.625, \"rgb(151, 255, 0)\"], [0.75, \"rgb(255, 234, 0)\"], [0.875, \"rgb(255, 111,", "(0.0, (0, 0, 0, 255)), ], \"yellowy\": [ (0.0, (0, 0, 0, 255)),", "from pyqtgraph, the circular colormap created by me (Victo), and the reversed version", "(1.0, (255, 255, 0, 255)), (0.5, (0, 0, 0, 255)), (0.25, (0, 0,", "curry yellow-green \"#17becf\", # blue-teal ] colorscales_raw = { \"Greys\": [[0, \"rgb(0,0,0)\"], [1,", "raise ValueError(\"one_rgba only supports rgb(r,g,b) and #rrggbb colors\") colorscales = {} for scale_name,", "[1, \"rgb(255, 0, 0)\"], ], \"portland\": [ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"],", "[ (0.0, (0, 0, 0, 255)), (0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941, (255,", "255)), ], \"plasma\": [ (0.0, (12, 7, 134, 255)), (0.25, (126, 3, 167,", "7, 134, 255)), (0.25, (126, 3, 167, 255)), (0.5, (203, 71, 119, 255)),", "modified RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5, 10, 172)\"], [0.35,", "[0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ],", "149, 64, 255)), (1.0, (239, 248, 33, 255)), ], \"magma\": [ (0.0, (0,", "\"rgb(255,255,255)\"], ], \"electric\": 
[ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8,", "(1.0, clip_color) if scale[-2][0] > 1 - clip_percent: scale_high[-2] = ((scale[-1][0] + scale[-2][0])", "[ [0, \"rgb(128, 0, 38)\"], [0.125, \"rgb(189, 0, 38)\"], [0.25, \"rgb(227, 26, 28)\"],", "], \"Greens\": [ [0, \"rgb(0, 68, 27)\"], [0.125, \"rgb(0, 109, 44)\"], [0.25, \"rgb(35,", "[ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"],", "\"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\": [ [0, \"rgb(0,0,131)\"], [0.125,", "last_idx = len(scale) - 1 reversed_scale = [ (scale[last_idx - i][0], color[1]) for", "orange \"#2ca02c\", # cooked asparagus green \"#d62728\", # brick red \"#9467bd\", # muted", "[0.375, \"rgb(0, 152, 255)\"], [0.5, \"rgb(44, 255, 150)\"], [0.625, \"rgb(151, 255, 0)\"], [0.75,", "[0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ], } #", "217)\"], [1, \"rgb(255, 255, 217)\"], ], \"Greens\": [ [0, \"rgb(0, 68, 27)\"], [0.125,", "82, 139, 255)), (0.5, (32, 144, 140, 255)), (0.75, (94, 201, 97, 255)),", "colorscales[scale_name] = scale for name, scale in list(colorscales.items()): last_idx = len(scale) - 1", "Generate also all scales with cliping at green for name, scale in list(colorscales.items()):", "scale[1][0] < clip_percent: scale_low[1] = ((scale[1][0] + scale[0][0]) / 2, scale_low[1][1]) else: scale_low[1]", "[0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\": [", "[0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = { 
\"thermal\":", "\"rgb(116, 196, 118)\"], [0.625, \"rgb(161, 217, 155)\"], [0.75, \"rgb(199, 233, 192)\"], [0.875, \"rgb(229,", "\"rgb(65, 171, 93)\"], [0.5, \"rgb(116, 196, 118)\"], [0.625, \"rgb(161, 217, 155)\"], [0.75, \"rgb(199,", "[1, \"rgb(255,0,0)\"]], # modified RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5,", "hsv, didn't patch qcodes to allow the specification of that part... \"cyclic\": [", "this is just to convert plotly colorscales to pyqtgraph tuples \"\"\" if c[0]", "\"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\": [ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2,", "(0, 255, 255, 255)), (1.0, (255, 255, 0, 255)), (0.5, (0, 0, 0,", "18, 123, 255)), (0.5, (182, 54, 121, 255)), (0.75, (251, 136, 97, 255)),", "for i, col in enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] =", "118)\"], [0.625, \"rgb(161, 217, 155)\"], [0.75, \"rgb(199, 233, 192)\"], [0.875, \"rgb(229, 245, 224)\"],", "= list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0, clip_color) if scale[-2][0] > 1 -", "196)\"], [0.625, \"rgb(127, 205, 187)\"], [0.75, \"rgb(199, 233, 180)\"], [0.875, \"rgb(237, 248, 217)\"],", "142, 8, 255)), (1.0, (252, 254, 164, 255)), ], \"plasma\": [ (0.0, (12,", "97, 255)), (1.0, (251, 252, 191, 255)), ], } def make_qcodes_anglemap45(): anglemap_colorlist =", "col)))] for i, col in enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"]", "= len(scale) - 1 reversed_scale = [ (scale[last_idx - i][0], color[1]) for i,", "\"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2,", "\"rgb(220, 220, 220)\"], ], \"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3,", 
"from Matplotlib 2.0 \"viridis\": [ (0.0, (68, 1, 84, 255)), (0.25, (58, 82,", "[1, \"rgb(217,30,30)\"], ], \"jet\": [ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"],", "raspberry yogurt pink \"#7f7f7f\", # middle gray \"#bcbd22\", # curry yellow-green \"#17becf\", #", "97, 255)), (1.0, (253, 231, 36, 255)), ], \"inferno\": [ (0.0, (0, 0,", "220, 255)), (0.5, (236, 0, 134, 255)), (0.8, (246, 246, 0, 255)), (1.0,", "scale in Gradients.items(): colorscales[scale_name] = scale for name, scale in list(colorscales.items()): last_idx =", "(1.0, (255, 255, 255, 255))], # Perceptually uniform sequential colormaps from Matplotlib 2.0", "220, 220)\"], ], \"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"],", "\"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"],", "224)\"], [1, \"rgb(247, 252, 245)\"], ], \"YlOrRd\": [ [0, \"rgb(128, 0, 38)\"], [0.125,", "0, 0, 255)), (0.25, (0, 0, 255, 255)), (0.75, (255, 0, 0, 255)),", "144, 140, 255)), (0.75, (94, 201, 97, 255)), (1.0, (253, 231, 36, 255)),", "254, 164, 255)), ], \"plasma\": [ (0.0, (12, 7, 134, 255)), (0.25, (126,", "colors maps from the qcodes, context menu of the color bar from pyqtgraph,", "255)), (0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247, (115,", "[1, \"rgb(247, 252, 245)\"], ], \"YlOrRd\": [ [0, \"rgb(128, 0, 38)\"], [0.125, \"rgb(189,", "(251, 136, 97, 255)), (1.0, (251, 252, 191, 255)), ], } def make_qcodes_anglemap45():", "\"#2ca02c\", # cooked asparagus green \"#d62728\", # brick red \"#9467bd\", # muted purple", "160)\"], [1, \"rgb(255, 255, 204)\"], ], \"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified", "[0.6901960784313725, 
\"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ],", "\"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba only supports rgb(r,g,b) and #rrggbb", "cooked asparagus green \"#d62728\", # brick red \"#9467bd\", # muted purple \"#8c564b\", #", "# chestnut brown \"#e377c2\", # raspberry yogurt pink \"#7f7f7f\", # middle gray \"#bcbd22\",", "119, 255)), (0.75, (248, 149, 64, 255)), (1.0, (239, 248, 33, 255)), ],", "colors\") colorscales = {} for scale_name, scale in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for", "44)\"], [0.25, \"rgb(35, 139, 69)\"], [0.375, \"rgb(65, 171, 93)\"], [0.5, \"rgb(116, 196, 118)\"],", "= 0.03 clip_color = (0, 255, 0, 255) scale_low = list(scale) scale_low.insert(1, scale[0])", "[0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1,", "if c[0] == \"#\" and len(c) == 7: return (int(c[1:3], 16), int(c[3:5], 16),", "we decide we want more we can make more, but for now this", "[i / (len_colorlist - 1), \"rgb\" + repr(tuple((int(x * 255) for x in", "255, 255)), (0.0, (0, 0, 0, 255)), ], \"yellowy\": [ (0.0, (0, 0,", "clip_percent: scale_high[-2] = ((scale[-1][0] + scale[-2][0]) / 2, scale_high[-2][1]) else: scale_high[-2] = (1", "\"Greens\": [ [0, \"rgb(0, 68, 27)\"], [0.125, \"rgb(0, 109, 44)\"], [0.25, \"rgb(35, 139,", "[0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353,", "33, 255)), ], \"magma\": [ (0.0, (0, 0, 3, 255)), (0.25, (80, 18,", "asparagus green \"#d62728\", # brick red \"#9467bd\", # muted purple \"#8c564b\", # chestnut", "\"#1fa088\"], 
[0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"],", "[ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\":", "to (r, g, b, a) input can be an rgb string 'rgb(r,g,b)', '#rrggbb'", "in Gradients.items(): colorscales[scale_name] = scale for name, scale in list(colorscales.items()): last_idx = len(scale)", "started. \"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default colors and colorscales, taken from", "+ repr(tuple((int(x * 255) for x in col)))] for i, col in enumerate(anglemap_colorlist)", "\"rgb(151, 255, 0)\"], [0.75, \"rgb(255, 234, 0)\"], [0.875, \"rgb(255, 111, 0)\"], [1, \"rgb(255,", "(0.0, (0, 0, 3, 255)), (0.25, (80, 18, 123, 255)), (0.5, (182, 54,", "of that part... # \"greyclip\": [ # (0.0, (0, 0, 0, 255)), #", "the specification of that part... 
\"cyclic\": [ (0.0, (255, 0, 4, 255)), (1.0,", "(0, 0, 0, 255)), # (0.99, (255, 255, 255, 255)), # (1.0, (255,", "0, 255)), # (0.99, (255, 255, 255, 255)), # (1.0, (255, 0, 0,", "\"rgb(127, 205, 187)\"], [0.75, \"rgb(199, 233, 180)\"], [0.875, \"rgb(237, 248, 217)\"], [1, \"rgb(255,", "\"flame\": [ (0.2, (7, 0, 220, 255)), (0.5, (236, 0, 134, 255)), (0.8,", "[0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\": [ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"],", "== \"#\" and len(c) == 7: return (int(c[1:3], 16), int(c[3:5], 16), int(c[5:7], 16),", "255)), (1.0, (252, 254, 164, 255)), ], \"plasma\": [ (0.0, (12, 7, 134,", "just to convert plotly colorscales to pyqtgraph tuples \"\"\" if c[0] == \"#\"", "all the colors maps from the qcodes, context menu of the color bar", "c[0] == \"#\" and len(c) == 7: return (int(c[1:3], 16), int(c[3:5], 16), int(c[5:7],", "204)\"], ], \"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified RdBu based on #", "[1, \"rgb(178, 10, 28)\"], ], # Scale for non-positive numeric values \"Blues\": [", "[0.125, \"rgb(0, 0, 200)\"], [0.25, \"rgb(0, 25, 255)\"], [0.375, \"rgb(0, 152, 255)\"], [0.5,", "\"bipolar\": [ (0.0, (0, 255, 255, 255)), (1.0, (255, 255, 0, 255)), (0.5,", "7: return (int(c[1:3], 16), int(c[3:5], 16), int(c[5:7], 16), 255) if c[:4] == \"rgb(\":", "(0, 0, 0, 255)), ], \"yellowy\": [ (0.0, (0, 0, 0, 255)), (0.2328863796753704,", "patch qcodes to allow the specification of that part... 
# \"greyclip\": [ #", "255)), ], \"bipolar\": [ (0.0, (0, 255, 255, 255)), (1.0, (255, 255, 0,", "[0, \"rgb(220, 220, 220)\"], [0.2, \"rgb(245, 195, 157)\"], [0.4, \"rgb(245, 160, 105)\"], [1,", "= len(anglemap_colorlist) color_scale = [ [i / (len_colorlist - 1), \"rgb\" + repr(tuple((int(x", "rgb(r,g,b) and #rrggbb colors\") colorscales = {} for scale_name, scale in colorscales_raw.items(): colorscales[scale_name]", "by <NAME> for Measurement Control It modules makes available all the colors maps", "> 1 - clip_percent: scale_high[-2] = ((scale[-1][0] + scale[-2][0]) / 2, scale_high[-2][1]) else:", "0, 4, 255)), (1.0, (255, 0, 0, 255)), ], # this is a", "by me (Victo), and the reversed version of all of them. Feel free", "[0.6, \"rgb(220, 170, 132)\"], [0.7, \"rgb(230, 145, 90)\"], [1, \"rgb(178, 10, 28)\"], ],", "import make_anglemap45_colorlist # default colors and colorscales, taken from plotly color_cycle = [", "255, 255)), ], \"bipolar\": [ (0.0, (0, 255, 255, 255)), (1.0, (255, 255,", "(0.0, (255, 0, 0, 255)), ], # this is a hsv, didn't patch", "- clip_percent: scale_high[-2] = ((scale[-1][0] + scale[-2][0]) / 2, scale_high[-2][1]) else: scale_high[-2] =", "(0.5, (182, 54, 121, 255)), (0.75, (251, 136, 97, 255)), (1.0, (251, 252,", "(1, (255, 255, 255, 255)), (0, (0, 0, 0, 255)), ], \"flame\": [", "71, 119, 255)), (0.75, (248, 149, 64, 255)), (1.0, (239, 248, 33, 255)),", "], \"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"],", "255)), (1.0, (239, 248, 33, 255)), ], \"magma\": [ (0.0, (0, 0, 3,", "255)), (0.5257586450247, (115, 15, 255, 255)), (1.0, (255, 255, 255, 255)), ], \"bipolar\":", "(115, 15, 255, 255)), (1.0, (255, 255, 255, 255)), ], \"bipolar\": [ (0.0,", "scale[-2][0]) / 2, scale_high[-2][1]) else: scale_high[-2] = (1 - clip_percent, scale_high[-2][1]) colorscales[name +", "0, 255)), (0.6666, (255, 220, 0, 255)), (1, (255, 255, 255, 255)), (0,", 
"217, 118)\"], [0.875, \"rgb(255, 237, 160)\"], [1, \"rgb(255, 255, 204)\"], ], \"bluered\": [[0,", "\"rgb(40, 60, 190)\"], [0.5, \"rgb(70, 100, 245)\"], [0.6, \"rgb(90, 120, 245)\"], [0.7, \"rgb(106,", "[0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\": [", "below to get you started. \"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default colors", "(255, 0, 4, 255)), (1.0, (255, 0, 0, 255)), ], # this is", "[0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941,", "+ (255,) raise ValueError(\"one_rgba only supports rgb(r,g,b) and #rrggbb colors\") colorscales = {}", "for scale_name, scale in Gradients.items(): colorscales[scale_name] = scale for name, scale in list(colorscales.items()):", "\"earth\": [ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1,", "[ (1.0, (255, 0, 255, 255)), (0.0, (255, 0, 0, 255)), ], #", "\"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8, 29, 88)\"], [0.125, \"rgb(37,", "len(c) == 7: return (int(c[1:3], 16), int(c[3:5], 16), int(c[5:7], 16), 255) if c[:4]", "from the qcodes, context menu of the color bar from pyqtgraph, the circular", "\"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"],", "[ (0.0, (12, 7, 134, 255)), (0.25, (126, 3, 167, 255)), (0.5, (203,", "\"_reversed\"] = reversed_scale # Generate also all scales with cliping at green for", "0, 38)\"], [0.125, \"rgb(189, 0, 38)\"], [0.25, \"rgb(227, 26, 28)\"], [0.375, \"rgb(252, 78,", "[1, \"rgb(255, 255, 204)\"], ], 
\"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified RdBu", "\"spectrum\": [ (1.0, (255, 0, 255, 255)), (0.0, (255, 0, 0, 255)), ],", "colorscale] def one_rgba(c): \"\"\" convert a single color value to (r, g, b,", "0, 0, 255)), ], \"yellowy\": [ (0.0, (0, 0, 0, 255)), (0.2328863796753704, (32,", "[0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6,", "\"rgb(220, 220, 220)\"], [0.2, \"rgb(245, 195, 157)\"], [0.4, \"rgb(245, 160, 105)\"], [1, \"rgb(178,", "\"rgb(229, 245, 224)\"], [1, \"rgb(247, 252, 245)\"], ], \"YlOrRd\": [ [0, \"rgb(128, 0,", "[0.7, \"rgb(106, 137, 247)\"], [1, \"rgb(220, 220, 220)\"], ], \"picnic\": [ [0, \"rgb(0,0,255)\"],", "0, 255)), ], \"flame\": [ (0.2, (7, 0, 220, 255)), (0.5, (236, 0,", "0, 0, 255)), (0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941, (255, 255, 0, 255)),", "patch qcodes to allow the specification of that part... \"cyclic\": [ (0.0, (255,", "[0.875, \"rgb(229, 245, 224)\"], [1, \"rgb(247, 252, 245)\"], ], \"YlOrRd\": [ [0, \"rgb(128,", "[0.25, \"rgb(0, 25, 255)\"], [0.375, \"rgb(0, 152, 255)\"], [0.5, \"rgb(44, 255, 150)\"], [0.625,", "[ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"],", "qcodes to allow the specification of that part... 
# \"greyclip\": [ # (0.0,", "28)\"], ], # Scale for non-positive numeric values \"Blues\": [ [0, \"rgb(5, 10,", "= { \"thermal\": [ (0.3333, (185, 0, 0, 255)), (0.6666, (255, 220, 0,", "(185, 0, 0, 255)), (0.6666, (255, 220, 0, 255)), (1, (255, 255, 255,", "[ [0, \"rgb(8, 29, 88)\"], [0.125, \"rgb(37, 52, 148)\"], [0.25, \"rgb(34, 94, 168)\"],", "the qcodes, context menu of the color bar from pyqtgraph, the circular colormap", "color bar from pyqtgraph, the circular colormap created by me (Victo), and the", "\"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\": [ [0, \"rgb(0,0,0)\"], [0.15,", "an rgb string 'rgb(r,g,b)', '#rrggbb' if we decide we want more we can", "scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"] = scale_low scale_high = list(scale) scale_high.insert(-1,", "105)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for non-positive numeric values \"Blues\":", "187)\"], [0.75, \"rgb(199, 233, 180)\"], [0.875, \"rgb(237, 248, 217)\"], [1, \"rgb(255, 255, 217)\"],", "[0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9,", "[0, \"rgb(8, 29, 88)\"], [0.125, \"rgb(37, 52, 148)\"], [0.25, \"rgb(34, 94, 168)\"], [0.375,", "# muted blue \"#ff7f0e\", # safety orange \"#2ca02c\", # cooked asparagus green \"#d62728\",", "brick red \"#9467bd\", # muted purple \"#8c564b\", # chestnut brown \"#e377c2\", # raspberry", "255)), (1.0, (253, 231, 36, 255)), ], \"inferno\": [ (0.0, (0, 0, 3,", "scale_low = list(scale) scale_low.insert(1, scale[0]) scale_low[0] = (0.0, clip_color) if scale[1][0] < clip_percent:", "clip_color = (0, 255, 0, 255) scale_low = list(scale) scale_low.insert(1, scale[0]) scale_low[0] =", "\"#d8e219\"], [1, \"#fde725\"], ], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py 
Gradients = { \"thermal\": [", "scale[-2][0] > 1 - clip_percent: scale_high[-2] = ((scale[-1][0] + scale[-2][0]) / 2, scale_high[-2][1])", "clip_color) if scale[-2][0] > 1 - clip_percent: scale_high[-2] = ((scale[-1][0] + scale[-2][0]) /", "(0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247, (115, 15, 255, 255)), (1.0, (255, 255,", "+ scale[0][0]) / 2, scale_low[1][1]) else: scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"]", "+ scale[-2][0]) / 2, scale_high[-2][1]) else: scale_high[-2] = (1 - clip_percent, scale_high[-2][1]) colorscales[name", "\"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\": [ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4,", "255, 255, 255)), ], \"bipolar\": [ (0.0, (0, 255, 255, 255)), (1.0, (255,", "0, 0, 255)), ], \"spectrum\": [ (1.0, (255, 0, 255, 255)), (0.0, (255,", "[0.75, \"rgb(255, 234, 0)\"], [0.875, \"rgb(255, 111, 0)\"], [1, \"rgb(255, 0, 0)\"], ],", "want more we can make more, but for now this is just to", "make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale): return [(v, one_rgba(c)) for v, c in", "# Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = { \"thermal\": [ (0.3333, (185, 0, 0, 255)),", "color_cycle = [ \"#1f77b4\", # muted blue \"#ff7f0e\", # safety orange \"#2ca02c\", #", "60, 190)\"], [0.5, \"rgb(70, 100, 245)\"], [0.6, \"rgb(90, 120, 245)\"], [0.7, \"rgb(106, 137,", "(236, 0, 134, 255)), (0.8, (246, 246, 0, 255)), (1.0, (255, 255, 255,", "make_rgba(colorscale): return [(v, one_rgba(c)) for v, c in colorscale] def one_rgba(c): \"\"\" convert", "255)), (0.8, (246, 246, 0, 255)), (1.0, (255, 255, 255, 255)), (0.0, (0,", "255)), (0.0, (255, 0, 0, 255)), ], # this is a hsv, didn't", "\"rgb(254, 178, 76)\"], [0.75, \"rgb(254, 217, 118)\"], [0.875, \"rgb(255, 237, 160)\"], [1, \"rgb(255,", "[0, \"rgb(0,0,131)\"], 
[0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ],", "on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(106, 137, 247)\"],", "\"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\": [ [0,", "qcodes_anglemap45 def make_rgba(colorscale): return [(v, one_rgba(c)) for v, c in colorscale] def one_rgba(c):", "# Scale for non-negative numeric values \"Reds\": [ [0, \"rgb(220, 220, 220)\"], [0.2,", "middle gray \"#bcbd22\", # curry yellow-green \"#17becf\", # blue-teal ] colorscales_raw = {", "(Victo), and the reversed version of all of them. Feel free to add", "me (Victo), and the reversed version of all of them. Feel free to", "[0.7, \"rgb(230, 145, 90)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for non-negative", "to get you started. \"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default colors and", "\"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\": [ [0,", "for x in col)))] for i, col in enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45", "\"rgb(199, 233, 192)\"], [0.875, \"rgb(229, 245, 224)\"], [1, \"rgb(247, 252, 245)\"], ], \"YlOrRd\":", "\"rgb(255,250,220)\"], ], \"viridis\": [ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274,", "[0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ],", "[0.35, \"rgb(40, 60, 190)\"], [0.5, \"rgb(70, 100, 245)\"], [0.6, \"rgb(90, 120, 245)\"], [0.7,", "qcodes to allow the specification of that part... 
\"cyclic\": [ (0.0, (255, 0,", "[0.35, \"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"], [0.7, \"rgb(230, 145,", "\"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\": [ [0,", "[0.5, \"rgb(70, 100, 245)\"], [0.6, \"rgb(90, 120, 245)\"], [0.7, \"rgb(106, 137, 247)\"], [1,", "one_rgba(c)) for v, c in colorscale] def one_rgba(c): \"\"\" convert a single color", "\"rgb(220, 170, 132)\"], [0.7, \"rgb(230, 145, 90)\"], [1, \"rgb(178, 10, 28)\"], ], #", "(246, 246, 0, 255)), (1.0, (255, 255, 255, 255)), (0.0, (0, 0, 0,", "\"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\": [ [0,", "[0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py", "255, 255))], # Perceptually uniform sequential colormaps from Matplotlib 2.0 \"viridis\": [ (0.0,", "[ (0.3333, (185, 0, 0, 255)), (0.6666, (255, 220, 0, 255)), (1, (255,", "v, c in colorscale] def one_rgba(c): \"\"\" convert a single color value to", "[0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569,", "is a hsv, didn't patch qcodes to allow the specification of that part...", "\"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0,", "(0.75, (251, 136, 97, 255)), (1.0, (251, 252, 191, 255)), ], } def", "\"#bcbd22\", # curry yellow-green \"#17becf\", # blue-teal ] colorscales_raw = { \"Greys\": [[0,", "# curry yellow-green \"#17becf\", # blue-teal ] colorscales_raw = { \"Greys\": 
[[0, \"rgb(0,0,0)\"],", "(len_colorlist - 1), \"rgb\" + repr(tuple((int(x * 255) for x in col)))] for", "specification of that part... \"cyclic\": [ (0.0, (255, 0, 4, 255)), (1.0, (255,", "\"rgb(128,0,0)\"], ], \"hot\": [ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ],", "\"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\": [ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6,", "[0.2, \"rgb(245, 195, 157)\"], [0.4, \"rgb(245, 160, 105)\"], [1, \"rgb(178, 10, 28)\"], ],", "[0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170,", "from pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default colors and colorscales, taken from plotly color_cycle", "255)), (1.0, (255, 255, 255, 255)), ], \"bipolar\": [ (0.0, (0, 255, 255,", "plotly colorscales to pyqtgraph tuples \"\"\" if c[0] == \"#\" and len(c) ==", "255)), (0.75, (248, 149, 64, 255)), (1.0, (239, 248, 33, 255)), ], \"magma\":", "- 1 reversed_scale = [ (scale[last_idx - i][0], color[1]) for i, color in", "[ (0.0, (0, 0, 3, 255)), (0.25, (87, 15, 109, 255)), (0.5, (187,", "\"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ],", "See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below to get you started. \"\"\" from pycqed.analysis.tools.plotting import", "int(c[5:7], 16), 255) if c[:4] == \"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise", "[ \"#1f77b4\", # muted blue \"#ff7f0e\", # safety orange \"#2ca02c\", # cooked asparagus", "# Scale for non-positive numeric values \"Blues\": [ [0, \"rgb(5, 10, 172)\"], [0.35,", "\"rgb(5, 10, 172)\"], [0.35, \"rgb(40, 60, 190)\"], [0.5, \"rgb(70, 100, 245)\"], [0.6, \"rgb(90,", "a hsv, didn't patch qcodes to allow the specification of that part... 
\"cyclic\":", "[1, \"rgb(255,250,220)\"], ], \"viridis\": [ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"],", "(0.6666, (255, 220, 0, 255)), (1, (255, 255, 255, 255)), (0, (0, 0,", "= [ [i / (len_colorlist - 1), \"rgb\" + repr(tuple((int(x * 255) for", "this is a hsv, didn't patch qcodes to allow the specification of that", "\"rgb(255,0,0)\"], ], \"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 200)\"], [0.25, \"rgb(0, 25,", "], \"yellowy\": [ (0.0, (0, 0, 0, 255)), (0.2328863796753704, (32, 0, 129, 255)),", "[0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 200)\"],", "c[:4] == \"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba only supports rgb(r,g,b)", "\"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0,", "3, 255)), (0.25, (87, 15, 109, 255)), (0.5, (187, 55, 84, 255)), (0.75,", "\"#1f77b4\", # muted blue \"#ff7f0e\", # safety orange \"#2ca02c\", # cooked asparagus green", "69)\"], [0.375, \"rgb(65, 171, 93)\"], [0.5, \"rgb(116, 196, 118)\"], [0.625, \"rgb(161, 217, 155)\"],", "78, 42)\"], [0.5, \"rgb(253, 141, 60)\"], [0.625, \"rgb(254, 178, 76)\"], [0.75, \"rgb(254, 217,", "colors and colorscales, taken from plotly color_cycle = [ \"#1f77b4\", # muted blue", "[0.375, \"rgb(65, 171, 93)\"], [0.5, \"rgb(116, 196, 118)\"], [0.625, \"rgb(161, 217, 155)\"], [0.75,", "colorscales[scale_name] = make_rgba(scale) for scale_name, scale in Gradients.items(): colorscales[scale_name] = scale for name,", "255)), ], } def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale", "\"YlOrRd\": [ [0, \"rgb(128, 0, 38)\"], [0.125, \"rgb(189, 0, 38)\"], [0.25, \"rgb(227, 26,", "enumerate(scale) ] 
colorscales[name + \"_reversed\"] = reversed_scale # Generate also all scales with", "to allow the specification of that part... \"cyclic\": [ (0.0, (255, 0, 4,", "tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba only supports rgb(r,g,b) and #rrggbb colors\") colorscales", "(87, 15, 109, 255)), (0.5, (187, 55, 84, 255)), (0.75, (249, 142, 8,", "[ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"],", "4, 255)), (1.0, (255, 0, 0, 255)), ], # this is a hsv,", "+ \"_reversed\"] = reversed_scale # Generate also all scales with cliping at green", "= (1.0, clip_color) if scale[-2][0] > 1 - clip_percent: scale_high[-2] = ((scale[-1][0] +", "scale_low.insert(1, scale[0]) scale_low[0] = (0.0, clip_color) if scale[1][0] < clip_percent: scale_low[1] = ((scale[1][0]", "0, 255, 255)), (0.75, (255, 0, 0, 255)), ], \"spectrum\": [ (1.0, (255,", "didn't patch qcodes to allow the specification of that part... 
# \"greyclip\": [", "(0.2, (7, 0, 220, 255)), (0.5, (236, 0, 134, 255)), (0.8, (246, 246,", "uniform sequential colormaps from Matplotlib 2.0 \"viridis\": [ (0.0, (68, 1, 84, 255)),", "qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale): return [(v, one_rgba(c)) for v,", "\"#17becf\", # blue-teal ] colorscales_raw = { \"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\":", "\"#7f7f7f\", # middle gray \"#bcbd22\", # curry yellow-green \"#17becf\", # blue-teal ] colorscales_raw", "values \"Reds\": [ [0, \"rgb(220, 220, 220)\"], [0.2, \"rgb(245, 195, 157)\"], [0.4, \"rgb(245,", "145, 192)\"], [0.5, \"rgb(65, 182, 196)\"], [0.625, \"rgb(127, 205, 187)\"], [0.75, \"rgb(199, 233,", "\"rgb(106, 137, 247)\"], [1, \"rgb(220, 220, 220)\"], ], \"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1,", "(255, 255, 255, 255)), (0, (0, 0, 0, 255)), ], \"flame\": [ (0.2,", "255)), # ], \"grey\": [(0.0, (0, 0, 0, 255)), (1.0, (255, 255, 255,", "168)\"], [0.375, \"rgb(29, 145, 192)\"], [0.5, \"rgb(65, 182, 196)\"], [0.625, \"rgb(127, 205, 187)\"],", "\"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\": [ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4,", "also all scales with cliping at green for name, scale in list(colorscales.items()): clip_percent", "<NAME> for Measurement Control It modules makes available all the colors maps from", "/ 2, scale_high[-2][1]) else: scale_high[-2] = (1 - clip_percent, scale_high[-2][1]) colorscales[name + \"_clip_high\"]", "180)\"], [0.875, \"rgb(237, 248, 217)\"], [1, \"rgb(255, 255, 217)\"], ], \"Greens\": [ [0,", "[ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"],", "255)), (0.75, (94, 201, 97, 255)), (1.0, (253, 231, 36, 255)), ], \"inferno\":", "colors See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" 
below to get you started. \"\"\" from pycqed.analysis.tools.plotting", "255, 217)\"], ], \"Greens\": [ [0, \"rgb(0, 68, 27)\"], [0.125, \"rgb(0, 109, 44)\"],", "178, 76)\"], [0.75, \"rgb(254, 217, 118)\"], [0.875, \"rgb(255, 237, 160)\"], [1, \"rgb(255, 255,", "single color value to (r, g, b, a) input can be an rgb", "* 255) for x in col)))] for i, col in enumerate(anglemap_colorlist) ] return", "], \"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf", "scale_low[0] = (0.0, clip_color) if scale[1][0] < clip_percent: scale_low[1] = ((scale[1][0] + scale[0][0])", "192)\"], [0.5, \"rgb(65, 182, 196)\"], [0.625, \"rgb(127, 205, 187)\"], [0.75, \"rgb(199, 233, 180)\"],", "\"electric\": [ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1,", "\"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\": [ [0,", "(0, (0, 0, 0, 255)), ], \"flame\": [ (0.2, (7, 0, 220, 255)),", "134, 255)), (0.8, (246, 246, 0, 255)), (1.0, (255, 255, 255, 255)), (0.0,", "\"#d62728\", # brick red \"#9467bd\", # muted purple \"#8c564b\", # chestnut brown \"#e377c2\",", "Perceptually uniform sequential colormaps from Matplotlib 2.0 \"viridis\": [ (0.0, (68, 1, 84,", "(0.5, (236, 0, 134, 255)), (0.8, (246, 246, 0, 255)), (1.0, (255, 255,", "(0.0, (12, 7, 134, 255)), (0.25, (126, 3, 167, 255)), (0.5, (203, 71,", "16), int(c[5:7], 16), 255) if c[:4] == \"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) + (255,)", "[0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725,", "255)), # (0.99, (255, 255, 255, 255)), # (1.0, (255, 0, 0, 255)),", "muted purple 
\"#8c564b\", # chestnut brown \"#e377c2\", # raspberry yogurt pink \"#7f7f7f\", #", "= reversed_scale # Generate also all scales with cliping at green for name,", "g, b, a) input can be an rgb string 'rgb(r,g,b)', '#rrggbb' if we", "in list(colorscales.items()): clip_percent = 0.03 clip_color = (0, 255, 0, 255) scale_low =", "scale_low[1][1]) else: scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"] = scale_low scale_high =", "colorscales, taken from plotly color_cycle = [ \"#1f77b4\", # muted blue \"#ff7f0e\", #", "(1.0, (255, 0, 0, 255)), # ], \"grey\": [(0.0, (0, 0, 0, 255)),", "255, 0, 255)), (0.5257586450247, (115, 15, 255, 255)), (1.0, (255, 255, 255, 255)),", "col in enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def", "(0, 0, 0, 255)), (0.25, (0, 0, 255, 255)), (0.75, (255, 0, 0,", "scales with cliping at green for name, scale in list(colorscales.items()): clip_percent = 0.03", "version of all of them. 
Feel free to add new colors See \"make_qcodes_anglemap\"", "255)), (1.0, (255, 255, 255, 255)), (0.0, (0, 0, 0, 255)), ], \"yellowy\":", "140, 255)), (0.75, (94, 201, 97, 255)), (1.0, (253, 231, 36, 255)), ],", "scale_high = list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0, clip_color) if scale[-2][0] > 1", "makes available all the colors maps from the qcodes, context menu of the", "129, 255)), (0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247, (115, 15, 255, 255)), (1.0,", "[0.5, \"rgb(116, 196, 118)\"], [0.625, \"rgb(161, 217, 155)\"], [0.75, \"rgb(199, 233, 192)\"], [0.875,", "[0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\": [ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"],", "(0.0, (0, 0, 3, 255)), (0.25, (87, 15, 109, 255)), (0.5, (187, 55,", "[0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\": [ [0, \"rgb(0,0,0)\"],", "# blue-teal ] colorscales_raw = { \"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [", "\"#fde725\"], ], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = { \"thermal\": [ (0.3333, (185,", "from plotly color_cycle = [ \"#1f77b4\", # muted blue \"#ff7f0e\", # safety orange", "(126, 3, 167, 255)), (0.5, (203, 71, 119, 255)), (0.75, (248, 149, 64,", "((scale[-1][0] + scale[-2][0]) / 2, scale_high[-2][1]) else: scale_high[-2] = (1 - clip_percent, scale_high[-2][1])", "[ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(40, 60, 190)\"], [0.5, \"rgb(70, 100, 245)\"],", "255) if c[:4] == \"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba only", "(r, g, b, a) input can be an rgb string 'rgb(r,g,b)', '#rrggbb' if", "= (0, 255, 0, 255) scale_low = list(scale) scale_low.insert(1, scale[0]) scale_low[0] = (0.0,", "(12, 7, 134, 255)), (0.25, (126, 3, 167, 255)), (0.5, (203, 71, 
119,", "121, 255)), (0.75, (251, 136, 97, 255)), (1.0, (251, 252, 191, 255)), ],", "\"#\" and len(c) == 7: return (int(c[1:3], 16), int(c[3:5], 16), int(c[5:7], 16), 255)", "\"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ], }", "def make_rgba(colorscale): return [(v, one_rgba(c)) for v, c in colorscale] def one_rgba(c): \"\"\"", "\"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ], } # Extracted", "[0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"], ], \"electric\": [", "(255, 255, 255, 255)), ], \"bipolar\": [ (0.0, (0, 255, 255, 255)), (1.0,", "\"rgb(255, 111, 0)\"], [1, \"rgb(255, 0, 0)\"], ], \"portland\": [ [0, \"rgb(12,51,131)\"], [0.25,", "\"_clip_low\"] = scale_low scale_high = list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0, clip_color) if", "based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(106, 137,", "247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"], [0.7, \"rgb(230, 145, 90)\"], [1, \"rgb(178,", "\"rgb(245, 195, 157)\"], [0.4, \"rgb(245, 160, 105)\"], [1, \"rgb(178, 10, 28)\"], ], #", "0, 0, 255)), ], # this is a hsv, didn't patch qcodes to", "{ \"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8, 29, 88)\"], [0.125,", "color[1]) for i, color in enumerate(scale) ] colorscales[name + \"_reversed\"] = reversed_scale #", "[0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\": [ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"],", "[0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 200)\"], [0.25, \"rgb(0, 25, 255)\"], [0.375, \"rgb(0, 152,", "more we can make more, but for now this is just to 
convert", "\"rgb(0, 0, 200)\"], [0.25, \"rgb(0, 25, 255)\"], [0.375, \"rgb(0, 152, 255)\"], [0.5, \"rgb(44,", "purple \"#8c564b\", # chestnut brown \"#e377c2\", # raspberry yogurt pink \"#7f7f7f\", # middle", "to convert plotly colorscales to pyqtgraph tuples \"\"\" if c[0] == \"#\" and", "\"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5,", "\"rgb(90, 120, 245)\"], [0.7, \"rgb(106, 137, 247)\"], [1, \"rgb(220, 220, 220)\"], ], \"picnic\":", "0, 38)\"], [0.25, \"rgb(227, 26, 28)\"], [0.375, \"rgb(252, 78, 42)\"], [0.5, \"rgb(253, 141,", "and the reversed version of all of them. Feel free to add new", "It modules makes available all the colors maps from the qcodes, context menu", "217)\"], ], \"Greens\": [ [0, \"rgb(0, 68, 27)\"], [0.125, \"rgb(0, 109, 44)\"], [0.25,", "= make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale = [ [i / (len_colorlist -", "the original qcodes.plots.colors Mofied by <NAME> for Measurement Control It modules makes available", "/ (len_colorlist - 1), \"rgb\" + repr(tuple((int(x * 255) for x in col)))]", "def one_rgba(c): \"\"\" convert a single color value to (r, g, b, a)", "\"rgb(29, 145, 192)\"], [0.5, \"rgb(65, 182, 196)\"], [0.625, \"rgb(127, 205, 187)\"], [0.75, \"rgb(199,", "[0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\": [", "use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale = [ [i / (len_colorlist - 1), \"rgb\"", "menu of the color bar from pyqtgraph, the circular colormap created by me", "tuples \"\"\" if c[0] == \"#\" and len(c) == 7: return (int(c[1:3], 16),", "[ (0.0, (0, 0, 3, 255)), (0.25, (80, 18, 123, 255)), (0.5, (182,", "\"viridis\": [ (0.0, (68, 1, 84, 255)), (0.25, (58, 82, 139, 255)), (0.5,", "171, 93)\"], [0.5, \"rgb(116, 196, 118)\"], [0.625, 
\"rgb(161, 217, 155)\"], [0.75, \"rgb(199, 233,", "[ (0.0, (0, 255, 255, 255)), (1.0, (255, 255, 0, 255)), (0.5, (0,", "(0.0, (68, 1, 84, 255)), (0.25, (58, 82, 139, 255)), (0.5, (32, 144,", "(255, 255, 255, 255)), # (1.0, (255, 0, 0, 255)), # ], \"grey\":", "= { \"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8, 29, 88)\"],", "(187, 55, 84, 255)), (0.75, (249, 142, 8, 255)), (1.0, (252, 254, 164,", "(80, 18, 123, 255)), (0.5, (182, 54, 121, 255)), (0.75, (251, 136, 97,", "(0.5, (203, 71, 119, 255)), (0.75, (248, 149, 64, 255)), (1.0, (239, 248,", "1 - clip_percent: scale_high[-2] = ((scale[-1][0] + scale[-2][0]) / 2, scale_high[-2][1]) else: scale_high[-2]", "\"portland\": [ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ],", "def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale = [ [i", "0)\"], [1, \"rgb(255, 0, 0)\"], ], \"portland\": [ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5,", "255, 255, 255)), (0, (0, 0, 0, 255)), ], \"flame\": [ (0.2, (7,", "(int(c[1:3], 16), int(c[3:5], 16), int(c[5:7], 16), 255) if c[:4] == \"rgb(\": return tuple(map(int,", "] return color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale): return [(v,", "255, 0, 255)), (0.5, (0, 0, 0, 255)), (0.25, (0, 0, 255, 255)),", "134, 255)), (0.25, (126, 3, 167, 255)), (0.5, (203, 71, 119, 255)), (0.75,", "all of them. 
Feel free to add new colors See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\"", "\"rgb(0, 68, 27)\"], [0.125, \"rgb(0, 109, 44)\"], [0.25, \"rgb(35, 139, 69)\"], [0.375, \"rgb(65,", "255)), ], \"yellowy\": [ (0.0, (0, 0, 0, 255)), (0.2328863796753704, (32, 0, 129,", "255, 255)), (1.0, (255, 255, 0, 255)), (0.5, (0, 0, 0, 255)), (0.25,", "(94, 201, 97, 255)), (1.0, (253, 231, 36, 255)), ], \"inferno\": [ (0.0,", "(0, 0, 3, 255)), (0.25, (80, 18, 123, 255)), (0.5, (182, 54, 121,", "172)\"], [0.35, \"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"], [0.7, \"rgb(230,", "and len(c) == 7: return (int(c[1:3], 16), int(c[3:5], 16), int(c[5:7], 16), 255) if", "[2020-02-03] Modified version of the original qcodes.plots.colors Mofied by <NAME> for Measurement Control", "25, 255)\"], [0.375, \"rgb(0, 152, 255)\"], [0.5, \"rgb(44, 255, 150)\"], [0.625, \"rgb(151, 255,", "part... \"cyclic\": [ (0.0, (255, 0, 4, 255)), (1.0, (255, 0, 0, 255)),", "= list(scale) scale_low.insert(1, scale[0]) scale_low[0] = (0.0, clip_color) if scale[1][0] < clip_percent: scale_low[1]", "\"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"],", "0, 255)), (1, (255, 255, 255, 255)), (0, (0, 0, 0, 255)), ],", "\"hot\": [ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\": [", "can make more, but for now this is just to convert plotly colorscales", "input can be an rgb string 'rgb(r,g,b)', '#rrggbb' if we decide we want", "255, 204)\"], ], \"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified RdBu based on", "part... 
# \"greyclip\": [ # (0.0, (0, 0, 0, 255)), # (0.99, (255,", "# this is a hsv, didn't patch qcodes to allow the specification of", "(32, 144, 140, 255)), (0.75, (94, 201, 97, 255)), (1.0, (253, 231, 36,", "if we decide we want more we can make more, but for now", "[0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7,", "(255, 0, 0, 255)), # ], \"grey\": [(0.0, (0, 0, 0, 255)), (1.0,", "scale_low scale_high = list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0, clip_color) if scale[-2][0] >", "\"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\": [ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375,", "\"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\": [ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1,", "94, 168)\"], [0.375, \"rgb(29, 145, 192)\"], [0.5, \"rgb(65, 182, 196)\"], [0.625, \"rgb(127, 205,", "\"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 200)\"], [0.25, \"rgb(0, 25, 255)\"], [0.375,", "make_anglemap45_colorlist # default colors and colorscales, taken from plotly color_cycle = [ \"#1f77b4\",", "\"jet\": [ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1,", "255)), # (1.0, (255, 0, 0, 255)), # ], \"grey\": [(0.0, (0, 0,", "[ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\":", "\"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients", "# ], \"grey\": [(0.0, (0, 0, 0, 255)), (1.0, (255, 255, 255, 255))],", "[1, \"rgb(255,0,0)\"], ], \"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 
200)\"], [0.25, \"rgb(0,", "(0.25, (0, 0, 255, 255)), (0.75, (255, 0, 0, 255)), ], \"spectrum\": [", "blue \"#ff7f0e\", # safety orange \"#2ca02c\", # cooked asparagus green \"#d62728\", # brick", "137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"], [0.7, \"rgb(230, 145, 90)\"], [1,", "120, 245)\"], [0.7, \"rgb(106, 137, 247)\"], [1, \"rgb(220, 220, 220)\"], ], \"picnic\": [", "28)\"], ], # Scale for non-negative numeric values \"Reds\": [ [0, \"rgb(220, 220,", "], \"spectrum\": [ (1.0, (255, 0, 255, 255)), (0.0, (255, 0, 0, 255)),", "(1.0, (252, 254, 164, 255)), ], \"plasma\": [ (0.0, (12, 7, 134, 255)),", "\"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\": [ [0,", "qcodes, context menu of the color bar from pyqtgraph, the circular colormap created", "(0, 0, 3, 255)), (0.25, (87, 15, 109, 255)), (0.5, (187, 55, 84,", "# safety orange \"#2ca02c\", # cooked asparagus green \"#d62728\", # brick red \"#9467bd\",", "safety orange \"#2ca02c\", # cooked asparagus green \"#d62728\", # brick red \"#9467bd\", #", "can be an rgb string 'rgb(r,g,b)', '#rrggbb' if we decide we want more", "name, scale in list(colorscales.items()): last_idx = len(scale) - 1 reversed_scale = [ (scale[last_idx", "muted blue \"#ff7f0e\", # safety orange \"#2ca02c\", # cooked asparagus green \"#d62728\", #", "(0.0, (255, 0, 4, 255)), (1.0, (255, 0, 0, 255)), ], # this", "scale_low[1] = ((scale[1][0] + scale[0][0]) / 2, scale_low[1][1]) else: scale_low[1] = (clip_percent, scale_low[1][1])", "255, 255)), (0.75, (255, 0, 0, 255)), ], \"spectrum\": [ (1.0, (255, 0,", "scale in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for scale_name, scale in Gradients.items(): colorscales[scale_name] =", "in colorscale] def one_rgba(c): \"\"\" convert a single color value to (r, g,", "64, 255)), (1.0, (239, 248, 33, 255)), ], \"magma\": [ (0.0, (0, 0,", "decide we want more we can 
make more, but for now this is", "[0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\": [ [0, \"rgb(150,0,90)\"],", "the reversed version of all of them. Feel free to add new colors", "them. Feel free to add new colors See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below to", "], # this is a hsv, didn't patch qcodes to allow the specification", "255, 255)), # (1.0, (255, 0, 0, 255)), # ], \"grey\": [(0.0, (0,", "(255, 0, 255, 255)), (0.0, (255, 0, 0, 255)), ], # this is", "[1, \"rgb(255,255,255)\"], ], \"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"],", "191, 255)), ], } def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist)", "255)), ], \"inferno\": [ (0.0, (0, 0, 3, 255)), (0.25, (87, 15, 109,", "[0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8,", "0, 0, 255)), (0.6666, (255, 220, 0, 255)), (1, (255, 255, 255, 255)),", "245)\"], [0.6, \"rgb(90, 120, 245)\"], [0.7, \"rgb(106, 137, 247)\"], [1, \"rgb(220, 220, 220)\"],", "\"rgb(253, 141, 60)\"], [0.625, \"rgb(254, 178, 76)\"], [0.75, \"rgb(254, 217, 118)\"], [0.875, \"rgb(255,", "90)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for non-negative numeric values \"Reds\":", "111, 0)\"], [1, \"rgb(255, 0, 0)\"], ], \"portland\": [ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"],", "192)\"], [0.875, \"rgb(229, 245, 224)\"], [1, \"rgb(247, 252, 245)\"], ], \"YlOrRd\": [ [0,", "(255, 0, 0, 255)), ], # this is a hsv, didn't patch qcodes", "54, 121, 255)), (0.75, (251, 136, 97, 255)), (1.0, (251, 252, 191, 255)),", "(255,) raise ValueError(\"one_rgba only supports rgb(r,g,b) and #rrggbb colors\") colorscales = {} for", "[0.25, \"rgb(227, 26, 28)\"], [0.375, 
\"rgb(252, 78, 42)\"], [0.5, \"rgb(253, 141, 60)\"], [0.625,", "[ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6, \"rgb(120,70,20)\"], [1, \"rgb(255,255,255)\"],", "255)), (0.75, (251, 136, 97, 255)), (1.0, (251, 252, 191, 255)), ], }", "version of the original qcodes.plots.colors Mofied by <NAME> for Measurement Control It modules", "(1.0, (255, 255, 255, 255)), ], \"bipolar\": [ (0.0, (0, 255, 255, 255)),", "color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale): return [(v, one_rgba(c)) for", "0, 0)\"], ], \"portland\": [ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"],", "(0.3333, (185, 0, 0, 255)), (0.6666, (255, 220, 0, 255)), (1, (255, 255,", "scale[0][0]) / 2, scale_low[1][1]) else: scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"] =", "= make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale): return [(v, one_rgba(c)) for v, c", "\"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\": [ [0, \"rgb(0,0,0)\"], [0.3,", "157)\"], [0.4, \"rgb(245, 160, 105)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for", "93)\"], [0.5, \"rgb(116, 196, 118)\"], [0.625, \"rgb(161, 217, 155)\"], [0.75, \"rgb(199, 233, 192)\"],", "3, 167, 255)), (0.5, (203, 71, 119, 255)), (0.75, (248, 149, 64, 255)),", "[0.5, \"rgb(65, 182, 196)\"], [0.625, \"rgb(127, 205, 187)\"], [0.75, \"rgb(199, 233, 180)\"], [0.875,", "27)\"], [0.125, \"rgb(0, 109, 44)\"], [0.25, \"rgb(35, 139, 69)\"], [0.375, \"rgb(65, 171, 93)\"],", "\"viridis\": [ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784,", "(0.5257586450247, (115, 15, 255, 255)), (1.0, 
(255, 255, 255, 255)), ], \"bipolar\": [", "\"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"],", "[0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\": [ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"],", "0, 3, 255)), (0.25, (87, 15, 109, 255)), (0.5, (187, 55, 84, 255)),", "you started. \"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default colors and colorscales, taken", "\"rgb(255, 234, 0)\"], [0.875, \"rgb(255, 111, 0)\"], [1, \"rgb(255, 0, 0)\"], ], \"portland\":", "], # Scale for non-negative numeric values \"Reds\": [ [0, \"rgb(220, 220, 220)\"],", "[0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients =", "yellow-green \"#17becf\", # blue-teal ] colorscales_raw = { \"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]],", "# brick red \"#9467bd\", # muted purple \"#8c564b\", # chestnut brown \"#e377c2\", #", "[0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\": [ [0, \"rgb(0,0,131)\"],", "], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = { \"thermal\": [ (0.3333, (185, 0,", "88)\"], [0.125, \"rgb(37, 52, 148)\"], [0.25, \"rgb(34, 94, 168)\"], [0.375, \"rgb(29, 145, 192)\"],", "255) for x in col)))] for i, col in enumerate(anglemap_colorlist) ] return color_scale", "255, 255)), (1.0, (255, 255, 255, 255)), ], \"bipolar\": [ (0.0, (0, 255,", "0)\"], [0.75, \"rgb(255, 234, 0)\"], [0.875, \"rgb(255, 111, 0)\"], [1, \"rgb(255, 0, 0)\"],", "60)\"], [0.625, \"rgb(254, 178, 76)\"], [0.75, \"rgb(254, 217, 118)\"], [0.875, \"rgb(255, 237, 
160)\"],", "\"rgb(245, 160, 105)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for non-positive numeric", "], \"inferno\": [ (0.0, (0, 0, 3, 255)), (0.25, (87, 15, 109, 255)),", "more, but for now this is just to convert plotly colorscales to pyqtgraph", "152, 255)\"], [0.5, \"rgb(44, 255, 150)\"], [0.625, \"rgb(151, 255, 0)\"], [0.75, \"rgb(255, 234,", "1 reversed_scale = [ (scale[last_idx - i][0], color[1]) for i, color in enumerate(scale)", "255, 0)\"], [0.75, \"rgb(255, 234, 0)\"], [0.875, \"rgb(255, 111, 0)\"], [1, \"rgb(255, 0,", "\"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\": [ [0, \"#440154\"], [0.06274509803921569,", "245)\"], [0.7, \"rgb(106, 137, 247)\"], [1, \"rgb(220, 220, 220)\"], ], \"picnic\": [ [0,", "of the color bar from pyqtgraph, the circular colormap created by me (Victo),", "255, 255)), (0, (0, 0, 0, 255)), ], \"flame\": [ (0.2, (7, 0,", "255)), (0.25, (80, 18, 123, 255)), (0.5, (182, 54, 121, 255)), (0.75, (251,", "colorscales_raw = { \"Greys\": [[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8, 29,", "[0.5, \"rgb(253, 141, 60)\"], [0.625, \"rgb(254, 178, 76)\"], [0.75, \"rgb(254, 217, 118)\"], [0.875,", "\"rgb(0, 109, 44)\"], [0.25, \"rgb(35, 139, 69)\"], [0.375, \"rgb(65, 171, 93)\"], [0.5, \"rgb(116,", "[0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255,", "= [ (scale[last_idx - i][0], color[1]) for i, color in enumerate(scale) ] colorscales[name", "205, 187)\"], [0.75, \"rgb(199, 233, 180)\"], [0.875, \"rgb(237, 248, 217)\"], [1, \"rgb(255, 255,", "190)\"], [0.5, \"rgb(70, 100, 245)\"], [0.6, \"rgb(90, 120, 245)\"], [0.7, \"rgb(106, 137, 247)\"],", "(249, 142, 8, 255)), (1.0, (252, 254, 164, 255)), ], \"plasma\": [ (0.0,", "(1.0, (239, 248, 33, 255)), ], \"magma\": [ 
(0.0, (0, 0, 3, 255)),", "255)), (0.5, (0, 0, 0, 255)), (0.25, (0, 0, 255, 255)), (0.75, (255,", "\"rgb(37, 52, 148)\"], [0.25, \"rgb(34, 94, 168)\"], [0.375, \"rgb(29, 145, 192)\"], [0.5, \"rgb(65,", "201, 97, 255)), (1.0, (253, 231, 36, 255)), ], \"inferno\": [ (0.0, (0,", "136, 97, 255)), (1.0, (251, 252, 191, 255)), ], } def make_qcodes_anglemap45(): anglemap_colorlist", "255, 0, 255) scale_low = list(scale) scale_low.insert(1, scale[0]) scale_low[0] = (0.0, clip_color) if", "(0, 0, 255, 255)), (0.75, (255, 0, 0, 255)), ], \"spectrum\": [ (1.0,", "220)\"], ], \"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4,", "], \"bipolar\": [ (0.0, (0, 255, 255, 255)), (1.0, (255, 255, 0, 255)),", "= make_rgba(scale) for scale_name, scale in Gradients.items(): colorscales[scale_name] = scale for name, scale", "new colors See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below to get you started. \"\"\" from", "182, 196)\"], [0.625, \"rgb(127, 205, 187)\"], [0.75, \"rgb(199, 233, 180)\"], [0.875, \"rgb(237, 248,", "137, 247)\"], [1, \"rgb(220, 220, 220)\"], ], \"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"],", "[0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\": [ [0, \"#440154\"],", "36, 255)), ], \"inferno\": [ (0.0, (0, 0, 3, 255)), (0.25, (87, 15,", "255, 255, 255))], # Perceptually uniform sequential colormaps from Matplotlib 2.0 \"viridis\": [", "x in col)))] for i, col in enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45 =", "[0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\": [ [0, \"rgb(0,0,0)\"],", "(0.25, (126, 3, 167, 255)), (0.5, (203, 71, 119, 255)), (0.75, (248, 149,", "200)\"], [0.25, \"rgb(0, 25, 255)\"], [0.375, \"rgb(0, 152, 255)\"], [0.5, \"rgb(44, 255, 150)\"],", "\"rgb(189, 0, 38)\"], [0.25, 
\"rgb(227, 26, 28)\"], [0.375, \"rgb(252, 78, 42)\"], [0.5, \"rgb(253,", "[[0, \"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8, 29, 88)\"], [0.125, \"rgb(37, 52,", "], \"portland\": [ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"],", "[1, \"rgb(255,255,255)\"], ], \"electric\": [ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"],", "0, 255)), (1.0, (255, 255, 255, 255)), (0.0, (0, 0, 0, 255)), ],", "255)), (0.5, (203, 71, 119, 255)), (0.75, (248, 149, 64, 255)), (1.0, (239,", "\"\"\" [2020-02-03] Modified version of the original qcodes.plots.colors Mofied by <NAME> for Measurement", "i, col in enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45", "(0, 255, 0, 255) scale_low = list(scale) scale_low.insert(1, scale[0]) scale_low[0] = (0.0, clip_color)", "248, 33, 255)), ], \"magma\": [ (0.0, (0, 0, 3, 255)), (0.25, (80,", "\"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1, \"#fde725\"], ], } # Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = {", "supports rgb(r,g,b) and #rrggbb colors\") colorscales = {} for scale_name, scale in colorscales_raw.items():", "29, 88)\"], [0.125, \"rgb(37, 52, 148)\"], [0.25, \"rgb(34, 94, 168)\"], [0.375, \"rgb(29, 145,", "[0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\": [ [0, \"rgb(0,0,130)\"],", "42)\"], [0.5, \"rgb(253, 141, 60)\"], [0.625, \"rgb(254, 178, 76)\"], [0.75, \"rgb(254, 217, 118)\"],", "[ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220,", "255)), (1.0, (255, 255, 0, 255)), (0.5, (0, 0, 0, 255)), (0.25, (0,", "\"rgb(65, 182, 196)\"], [0.625, \"rgb(127, 205, 187)\"], [0.75, 
\"rgb(199, 233, 180)\"], [0.875, \"rgb(237,", "\"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\": [ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706,", "(32, 0, 129, 255)), (0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247, (115, 15, 255,", "\"magma\": [ (0.0, (0, 0, 3, 255)), (0.25, (80, 18, 123, 255)), (0.5,", "+ \"_clip_low\"] = scale_low scale_high = list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0, clip_color)", "maps from the qcodes, context menu of the color bar from pyqtgraph, the", "], \"magma\": [ (0.0, (0, 0, 3, 255)), (0.25, (80, 18, 123, 255)),", "list(colorscales.items()): last_idx = len(scale) - 1 reversed_scale = [ (scale[last_idx - i][0], color[1])", "], \"viridis\": [ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"],", "] colorscales[name + \"_reversed\"] = reversed_scale # Generate also all scales with cliping", "(7, 0, 220, 255)), (0.5, (236, 0, 134, 255)), (0.8, (246, 246, 0,", "0, 255)), (0.25, (0, 0, 255, 255)), (0.75, (255, 0, 0, 255)), ],", "8, 255)), (1.0, (252, 254, 164, 255)), ], \"plasma\": [ (0.0, (12, 7,", "\"inferno\": [ (0.0, (0, 0, 3, 255)), (0.25, (87, 15, 109, 255)), (0.5,", "free to add new colors See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below to get you", "1, 84, 255)), (0.25, (58, 82, 139, 255)), (0.5, (32, 144, 140, 255)),", "a hsv, didn't patch qcodes to allow the specification of that part... 
#", "\"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"], [0.7, \"rgb(230, 145, 90)\"],", "= qcodes_anglemap45 def make_rgba(colorscale): return [(v, one_rgba(c)) for v, c in colorscale] def", "233, 180)\"], [0.875, \"rgb(237, 248, 217)\"], [1, \"rgb(255, 255, 217)\"], ], \"Greens\": [", "[0.375, \"rgb(29, 145, 192)\"], [0.5, \"rgb(65, 182, 196)\"], [0.625, \"rgb(127, 205, 187)\"], [0.75,", "[1, \"rgb(178, 10, 28)\"], ], # Scale for non-negative numeric values \"Reds\": [", "155)\"], [0.75, \"rgb(199, 233, 192)\"], [0.875, \"rgb(229, 245, 224)\"], [1, \"rgb(247, 252, 245)\"],", "\"rgb(254, 217, 118)\"], [0.875, \"rgb(255, 237, 160)\"], [1, \"rgb(255, 255, 204)\"], ], \"bluered\":", "\"rgb(255, 255, 217)\"], ], \"Greens\": [ [0, \"rgb(0, 68, 27)\"], [0.125, \"rgb(0, 109,", "\"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"], [1, \"rgb(128,0,0)\"], ], \"hot\": [ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6,", "make more, but for now this is just to convert plotly colorscales to", "[0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412,", "== \"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba only supports rgb(r,g,b) and", "Extracted https://github.com/pyqtgraph/pyqtgraph/blob/develop/pyqtgraph/graphicsItems/GradientEditorItem.py Gradients = { \"thermal\": [ (0.3333, (185, 0, 0, 255)), (0.6666,", "\"rgb(70, 100, 245)\"], [0.6, \"rgb(90, 120, 245)\"], [0.7, \"rgb(106, 137, 247)\"], [1, \"rgb(220,", "1), \"rgb\" + repr(tuple((int(x * 255) for x in col)))] for i, col", "a) input can be an rgb string 'rgb(r,g,b)', '#rrggbb' if we decide we", "[0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\": [", "len(scale) - 1 reversed_scale 
= [ (scale[last_idx - i][0], color[1]) for i, color", "Gradients.items(): colorscales[scale_name] = scale for name, scale in list(colorscales.items()): last_idx = len(scale) -", "colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for scale_name, scale in Gradients.items(): colorscales[scale_name] = scale for", "pink \"#7f7f7f\", # middle gray \"#bcbd22\", # curry yellow-green \"#17becf\", # blue-teal ]", "[ # (0.0, (0, 0, 0, 255)), # (0.99, (255, 255, 255, 255)),", "(0.25, (87, 15, 109, 255)), (0.5, (187, 55, 84, 255)), (0.75, (249, 142,", "for name, scale in list(colorscales.items()): last_idx = len(scale) - 1 reversed_scale = [", "value to (r, g, b, a) input can be an rgb string 'rgb(r,g,b)',", "cliping at green for name, scale in list(colorscales.items()): clip_percent = 0.03 clip_color =", "# default colors and colorscales, taken from plotly color_cycle = [ \"#1f77b4\", #", "#rrggbb colors\") colorscales = {} for scale_name, scale in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale)", "# www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(106, 137, 247)\"], [0.5,", "(0.99, (255, 255, 255, 255)), # (1.0, (255, 0, 0, 255)), # ],", "get you started. \"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default colors and colorscales,", "created by me (Victo), and the reversed version of all of them. 
Feel", "[0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"], [0.9411764705882353, \"#d8e219\"], [1,", "# modified RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5, 10, 172)\"],", "the circular colormap created by me (Victo), and the reversed version of all", "\"rgb(255, 237, 160)\"], [1, \"rgb(255, 255, 204)\"], ], \"bluered\": [[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]],", "195, 157)\"], [0.4, \"rgb(245, 160, 105)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale", "0, 255) scale_low = list(scale) scale_low.insert(1, scale[0]) scale_low[0] = (0.0, clip_color) if scale[1][0]", "0, 255)), (1.0, (255, 255, 255, 255))], # Perceptually uniform sequential colormaps from", "\"rgb(5, 10, 172)\"], [0.35, \"rgb(106, 137, 247)\"], [0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"],", "172)\"], [0.35, \"rgb(40, 60, 190)\"], [0.5, \"rgb(70, 100, 245)\"], [0.6, \"rgb(90, 120, 245)\"],", "196, 118)\"], [0.625, \"rgb(161, 217, 155)\"], [0.75, \"rgb(199, 233, 192)\"], [0.875, \"rgb(229, 245,", "[0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\": [ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"],", "brown \"#e377c2\", # raspberry yogurt pink \"#7f7f7f\", # middle gray \"#bcbd22\", # curry", "\"rgb(34, 94, 168)\"], [0.375, \"rgb(29, 145, 192)\"], [0.5, \"rgb(65, 182, 196)\"], [0.625, \"rgb(127,", "didn't patch qcodes to allow the specification of that part... \"cyclic\": [ (0.0,", "allow the specification of that part... 
# \"greyclip\": [ # (0.0, (0, 0,", "Control It modules makes available all the colors maps from the qcodes, context", "123, 255)), (0.5, (182, 54, 121, 255)), (0.75, (251, 136, 97, 255)), (1.0,", "(1.0, (251, 252, 191, 255)), ], } def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False)", "[0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196,", "[[0, \"rgb(0,0,255)\"], [1, \"rgb(255,0,0)\"]], # modified RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [", "for scale_name, scale in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for scale_name, scale in Gradients.items():", "(0.0, (0, 0, 0, 255)), # (0.99, (255, 255, 255, 255)), # (1.0,", "145, 90)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for non-negative numeric values", "make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale = [ [i /", "\"rgb\" + repr(tuple((int(x * 255) for x in col)))] for i, col in", "colorscales[name + \"_reversed\"] = reversed_scale # Generate also all scales with cliping at", "(255, 255, 0, 255)), (0.5, (0, 0, 0, 255)), (0.25, (0, 0, 255,", "(0.75, (255, 0, 0, 255)), ], \"spectrum\": [ (1.0, (255, 0, 255, 255)),", "# middle gray \"#bcbd22\", # curry yellow-green \"#17becf\", # blue-teal ] colorscales_raw =", "[1, \"rgb(220, 220, 220)\"], ], \"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"],", "ValueError(\"one_rgba only supports rgb(r,g,b) and #rrggbb colors\") colorscales = {} for scale_name, scale", "0, 134, 255)), (0.8, (246, 246, 0, 255)), (1.0, (255, 255, 255, 255)),", "246, 0, 255)), (1.0, (255, 255, 255, 255)), (0.0, (0, 0, 0, 255)),", "Feel free to add new colors See 
\"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below to get", "\"\"\" convert a single color value to (r, g, b, a) input can", "Modified version of the original qcodes.plots.colors Mofied by <NAME> for Measurement Control It", "'rgb(r,g,b)', '#rrggbb' if we decide we want more we can make more, but", "scale_name, scale in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for scale_name, scale in Gradients.items(): colorscales[scale_name]", "38)\"], [0.25, \"rgb(227, 26, 28)\"], [0.375, \"rgb(252, 78, 42)\"], [0.5, \"rgb(253, 141, 60)\"],", "84, 255)), (0.25, (58, 82, 139, 255)), (0.5, (32, 144, 140, 255)), (0.75,", "scale_high[-2] = ((scale[-1][0] + scale[-2][0]) / 2, scale_high[-2][1]) else: scale_high[-2] = (1 -", "yogurt pink \"#7f7f7f\", # middle gray \"#bcbd22\", # curry yellow-green \"#17becf\", # blue-teal", "scale[0]) scale_low[0] = (0.0, clip_color) if scale[1][0] < clip_percent: scale_low[1] = ((scale[1][0] +", "0)\"], ], \"portland\": [ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1,", "[1, \"rgb(160,200,255)\"], ], \"earth\": [ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"],", "list(colorscales.items()): clip_percent = 0.03 clip_color = (0, 255, 0, 255) scale_low = list(scale)", "bar from pyqtgraph, the circular colormap created by me (Victo), and the reversed", "string 'rgb(r,g,b)', '#rrggbb' if we decide we want more we can make more,", "reversed_scale = [ (scale[last_idx - i][0], color[1]) for i, color in enumerate(scale) ]", "], \"grey\": [(0.0, (0, 0, 0, 255)), (1.0, (255, 255, 255, 255))], #", "3, 255)), (0.25, (80, 18, 123, 255)), (0.5, (182, 54, 121, 255)), (0.75,", "scale_high[-1] = (1.0, clip_color) if scale[-2][0] > 1 - clip_percent: scale_high[-2] = ((scale[-1][0]", "scale[-1]) scale_high[-1] = (1.0, clip_color) if scale[-2][0] > 1 - clip_percent: scale_high[-2] =", "[0.4392156862745098, 
\"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039,", "colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale): return [(v, one_rgba(c)) for v, c in colorscale]", "[0.25, \"rgb(34, 94, 168)\"], [0.375, \"rgb(29, 145, 192)\"], [0.5, \"rgb(65, 182, 196)\"], [0.625,", "return [(v, one_rgba(c)) for v, c in colorscale] def one_rgba(c): \"\"\" convert a", "hsv, didn't patch qcodes to allow the specification of that part... # \"greyclip\":", "[0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0,", "for Measurement Control It modules makes available all the colors maps from the", "qcodes.plots.colors Mofied by <NAME> for Measurement Control It modules makes available all the", "red \"#9467bd\", # muted purple \"#8c564b\", # chestnut brown \"#e377c2\", # raspberry yogurt", "return color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale): return [(v, one_rgba(c))", "\"\"\" if c[0] == \"#\" and len(c) == 7: return (int(c[1:3], 16), int(c[3:5],", "148)\"], [0.25, \"rgb(34, 94, 168)\"], [0.375, \"rgb(29, 145, 192)\"], [0.5, \"rgb(65, 182, 196)\"],", "76)\"], [0.75, \"rgb(254, 217, 118)\"], [0.875, \"rgb(255, 237, 160)\"], [1, \"rgb(255, 255, 204)\"],", "26, 28)\"], [0.375, \"rgb(252, 78, 42)\"], [0.5, \"rgb(253, 141, 60)\"], [0.625, \"rgb(254, 178,", "(182, 54, 121, 255)), (0.75, (251, 136, 97, 255)), (1.0, (251, 252, 191,", "< clip_percent: scale_low[1] = ((scale[1][0] + scale[0][0]) / 2, scale_low[1][1]) else: scale_low[1] =", "141, 60)\"], [0.625, \"rgb(254, 178, 76)\"], [0.75, \"rgb(254, 217, 118)\"], [0.875, \"rgb(255, 237,", "green for name, scale in list(colorscales.items()): clip_percent = 0.03 clip_color = (0, 255,", "for i, color 
in enumerate(scale) ] colorscales[name + \"_reversed\"] = reversed_scale # Generate", "[0.125, \"rgb(189, 0, 38)\"], [0.25, \"rgb(227, 26, 28)\"], [0.375, \"rgb(252, 78, 42)\"], [0.5,", "170, 132)\"], [0.7, \"rgb(230, 145, 90)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale", "clip_percent = 0.03 clip_color = (0, 255, 0, 255) scale_low = list(scale) scale_low.insert(1,", "164, 255)), ], \"plasma\": [ (0.0, (12, 7, 134, 255)), (0.25, (126, 3,", "[(v, one_rgba(c)) for v, c in colorscale] def one_rgba(c): \"\"\" convert a single", "non-positive numeric values \"Blues\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(40, 60, 190)\"],", "255)), (0.25, (126, 3, 167, 255)), (0.5, (203, 71, 119, 255)), (0.75, (248,", "255)), ], \"spectrum\": [ (1.0, (255, 0, 255, 255)), (0.0, (255, 0, 0,", "(255, 255, 255, 255)), (0.0, (0, 0, 0, 255)), ], \"yellowy\": [ (0.0,", "[0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(40, 60, 190)\"], [0.5, \"rgb(70, 100, 245)\"], [0.6,", "0, 0, 255)), # (0.99, (255, 255, 255, 255)), # (1.0, (255, 0,", "0, 220, 255)), (0.5, (236, 0, 134, 255)), (0.8, (246, 246, 0, 255)),", "\"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"],", "255)), (0, (0, 0, 0, 255)), ], \"flame\": [ (0.2, (7, 0, 220,", "0, 129, 255)), (0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247, (115, 15, 255, 255)),", "for name, scale in list(colorscales.items()): clip_percent = 0.03 clip_color = (0, 255, 0,", "\"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\":", "[0.5, \"rgb(242,211,56)\"], [0.75, \"rgb(242,143,56)\"], [1, \"rgb(217,30,30)\"], ], \"jet\": [ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"],", "(255, 220, 0, 255)), (1, (255, 255, 255, 255)), (0, (0, 0, 0,", "(1.0, (255, 0, 255, 255)), 
(0.0, (255, 0, 0, 255)), ], # this", "0, 255)), (0.5, (0, 0, 0, 255)), (0.25, (0, 0, 255, 255)), (0.75,", "(255, 255, 0, 255)), (0.5257586450247, (115, 15, 255, 255)), (1.0, (255, 255, 255,", "0, 255)), (0.5257586450247, (115, 15, 255, 255)), (1.0, (255, 255, 255, 255)), ],", "255)), (0.5, (182, 54, 121, 255)), (0.75, (251, 136, 97, 255)), (1.0, (251,", "Gradients = { \"thermal\": [ (0.3333, (185, 0, 0, 255)), (0.6666, (255, 220,", "\"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below to get you started. \"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist", "(255, 255, 255, 255))], # Perceptually uniform sequential colormaps from Matplotlib 2.0 \"viridis\":", "118)\"], [0.875, \"rgb(255, 237, 160)\"], [1, \"rgb(255, 255, 204)\"], ], \"bluered\": [[0, \"rgb(0,0,255)\"],", "sequential colormaps from Matplotlib 2.0 \"viridis\": [ (0.0, (68, 1, 84, 255)), (0.25,", "that part... # \"greyclip\": [ # (0.0, (0, 0, 0, 255)), # (0.99,", "pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default colors and colorscales, taken from plotly color_cycle =", "255)), (0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247, (115, 15, 255, 255)), (1.0, (255,", "100, 245)\"], [0.6, \"rgb(90, 120, 245)\"], [0.7, \"rgb(106, 137, 247)\"], [1, \"rgb(220, 220,", "of that part... \"cyclic\": [ (0.0, (255, 0, 4, 255)), (1.0, (255, 0,", "(239, 248, 33, 255)), ], \"magma\": [ (0.0, (0, 0, 3, 255)), (0.25,", "38)\"], [0.125, \"rgb(189, 0, 38)\"], [0.25, \"rgb(227, 26, 28)\"], [0.375, \"rgb(252, 78, 42)\"],", "= {} for scale_name, scale in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for scale_name, scale", "add new colors See \"make_qcodes_anglemap\" and \"make_anglemap45_colorlist\" below to get you started. 
\"\"\"", "- i][0], color[1]) for i, color in enumerate(scale) ] colorscales[name + \"_reversed\"] =", "\"#9467bd\", # muted purple \"#8c564b\", # chestnut brown \"#e377c2\", # raspberry yogurt pink", "(0, 0, 0, 255)), (1.0, (255, 255, 255, 255))], # Perceptually uniform sequential", "150)\"], [0.625, \"rgb(151, 255, 0)\"], [0.75, \"rgb(255, 234, 0)\"], [0.875, \"rgb(255, 111, 0)\"],", "255, 255, 255)), (1.0, (255, 255, 0, 255)), (0.5, (0, 0, 0, 255)),", "160, 105)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for non-positive numeric values", "68, 27)\"], [0.125, \"rgb(0, 109, 44)\"], [0.25, \"rgb(35, 139, 69)\"], [0.375, \"rgb(65, 171,", "colorscales[name + \"_clip_low\"] = scale_low scale_high = list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0,", "0, 0, 255)), ], \"flame\": [ (0.2, (7, 0, 220, 255)), (0.5, (236,", "# cooked asparagus green \"#d62728\", # brick red \"#9467bd\", # muted purple \"#8c564b\",", "colormap created by me (Victo), and the reversed version of all of them.", "color value to (r, g, b, a) input can be an rgb string", "255)), ], # this is a hsv, didn't patch qcodes to allow the", "139, 69)\"], [0.375, \"rgb(65, 171, 93)\"], [0.5, \"rgb(116, 196, 118)\"], [0.625, \"rgb(161, 217,", "252, 245)\"], ], \"YlOrRd\": [ [0, \"rgb(128, 0, 38)\"], [0.125, \"rgb(189, 0, 38)\"],", "255)), (0.25, (87, 15, 109, 255)), (0.5, (187, 55, 84, 255)), (0.75, (249,", "[1, \"rgb(128,0,0)\"], ], \"hot\": [ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"],", "\"rgb(44, 255, 150)\"], [0.625, \"rgb(151, 255, 0)\"], [0.75, \"rgb(255, 234, 0)\"], [0.875, \"rgb(255,", "taken from plotly color_cycle = [ \"#1f77b4\", # muted blue \"#ff7f0e\", # safety", "[0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\": [", "{ \"thermal\": [ (0.3333, (185, 0, 0, 255)), (0.6666, (255, 220, 0, 255)),", "repr(tuple((int(x * 255) for x in 
col)))] for i, col in enumerate(anglemap_colorlist) ]", "(251, 252, 191, 255)), ], } def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist", "\"rgb(128, 0, 38)\"], [0.125, \"rgb(189, 0, 38)\"], [0.25, \"rgb(227, 26, 28)\"], [0.375, \"rgb(252,", "\"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"], ], \"rainbow\": [ [0, \"rgb(150,0,90)\"], [0.125,", "255)), (1.0, (255, 0, 0, 255)), ], # this is a hsv, didn't", "[0.6, \"rgb(90, 120, 245)\"], [0.7, \"rgb(106, 137, 247)\"], [1, \"rgb(220, 220, 220)\"], ],", "\"rgb(178, 10, 28)\"], ], # Scale for non-positive numeric values \"Blues\": [ [0,", "\"rgb(161, 217, 155)\"], [0.75, \"rgb(199, 233, 192)\"], [0.875, \"rgb(229, 245, 224)\"], [1, \"rgb(247,", "= [ \"#1f77b4\", # muted blue \"#ff7f0e\", # safety orange \"#2ca02c\", # cooked", "that part... \"cyclic\": [ (0.0, (255, 0, 4, 255)), (1.0, (255, 0, 0,", "the specification of that part... # \"greyclip\": [ # (0.0, (0, 0, 0,", "220, 0, 255)), (1, (255, 255, 255, 255)), (0, (0, 0, 0, 255)),", "(0.5, (187, 55, 84, 255)), (0.75, (249, 142, 8, 255)), (1.0, (252, 254,", "if c[:4] == \"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba only supports", "c in colorscale] def one_rgba(c): \"\"\" convert a single color value to (r,", "[ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\": [ [0,", "of all of them. 
Feel free to add new colors See \"make_qcodes_anglemap\" and", "2, scale_low[1][1]) else: scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"] = scale_low scale_high", "0, 255)), ], # this is a hsv, didn't patch qcodes to allow", "(0.5, (0, 0, 0, 255)), (0.25, (0, 0, 255, 255)), (0.75, (255, 0,", "green \"#d62728\", # brick red \"#9467bd\", # muted purple \"#8c564b\", # chestnut brown", "0, 255)), ], \"yellowy\": [ (0.0, (0, 0, 0, 255)), (0.2328863796753704, (32, 0,", "[0.75, \"rgb(254, 217, 118)\"], [0.875, \"rgb(255, 237, 160)\"], [1, \"rgb(255, 255, 204)\"], ],", "0, 0, 255)), (1.0, (255, 255, 255, 255))], # Perceptually uniform sequential colormaps", "if scale[-2][0] > 1 - clip_percent: scale_high[-2] = ((scale[-1][0] + scale[-2][0]) / 2,", "255, 255, 255)), (0.0, (0, 0, 0, 255)), ], \"yellowy\": [ (0.0, (0,", "10, 28)\"], ], # Scale for non-positive numeric values \"Blues\": [ [0, \"rgb(5,", "- 1), \"rgb\" + repr(tuple((int(x * 255) for x in col)))] for i,", "(0.8, (246, 246, 0, 255)), (1.0, (255, 255, 255, 255)), (0.0, (0, 0,", "28)\"], [0.375, \"rgb(252, 78, 42)\"], [0.5, \"rgb(253, 141, 60)\"], [0.625, \"rgb(254, 178, 76)\"],", "\"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"], [0.9, \"rgb(255,102,102)\"], [1, \"rgb(255,0,0)\"],", "(0.75, (94, 201, 97, 255)), (1.0, (253, 231, 36, 255)), ], \"inferno\": [", "at green for name, scale in list(colorscales.items()): clip_percent = 0.03 clip_color = (0,", "(0.75, (248, 149, 64, 255)), (1.0, (239, 248, 33, 255)), ], \"magma\": [", "to pyqtgraph tuples \"\"\" if c[0] == \"#\" and len(c) == 7: return", "\"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137, \"#472d7b\"], [0.18823529411764706, \"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"],", "convert plotly colorscales to pyqtgraph tuples \"\"\" if c[0] == \"#\" 
and len(c)", "we want more we can make more, but for now this is just", "is just to convert plotly colorscales to pyqtgraph tuples \"\"\" if c[0] ==", "247)\"], [1, \"rgb(220, 220, 220)\"], ], \"picnic\": [ [0, \"rgb(0,0,255)\"], [0.1, \"rgb(51,153,255)\"], [0.2,", "len_colorlist = len(anglemap_colorlist) color_scale = [ [i / (len_colorlist - 1), \"rgb\" +", "else: scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name + \"_clip_low\"] = scale_low scale_high = list(scale)", "0, 200)\"], [0.25, \"rgb(0, 25, 255)\"], [0.375, \"rgb(0, 152, 255)\"], [0.5, \"rgb(44, 255,", "0, 255, 255)), (0.0, (255, 0, 0, 255)), ], # this is a", "], \"plasma\": [ (0.0, (12, 7, 134, 255)), (0.25, (126, 3, 167, 255)),", "enumerate(anglemap_colorlist) ] return color_scale qcodes_anglemap45 = make_qcodes_anglemap45() colorscales_raw[\"anglemap45\"] = qcodes_anglemap45 def make_rgba(colorscale): return", "for non-positive numeric values \"Blues\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(40, 60,", "# Generate also all scales with cliping at green for name, scale in", "\"rgb(199, 233, 180)\"], [0.875, \"rgb(237, 248, 217)\"], [1, \"rgb(255, 255, 217)\"], ], \"Greens\":", "255, 255)), (0.0, (255, 0, 0, 255)), ], # this is a hsv,", "\"make_anglemap45_colorlist\" below to get you started. 
\"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist # default", "255)), (0.6666, (255, 220, 0, 255)), (1, (255, 255, 255, 255)), (0, (0,", "255)), (0.25, (58, 82, 139, 255)), (0.5, (32, 144, 140, 255)), (0.75, (94,", "139, 255)), (0.5, (32, 144, 140, 255)), (0.75, (94, 201, 97, 255)), (1.0,", "220, 220)\"], [0.2, \"rgb(245, 195, 157)\"], [0.4, \"rgb(245, 160, 105)\"], [1, \"rgb(178, 10,", "255)\"], [0.5, \"rgb(44, 255, 150)\"], [0.625, \"rgb(151, 255, 0)\"], [0.75, \"rgb(255, 234, 0)\"],", "84, 255)), (0.75, (249, 142, 8, 255)), (1.0, (252, 254, 164, 255)), ],", "# raspberry yogurt pink \"#7f7f7f\", # middle gray \"#bcbd22\", # curry yellow-green \"#17becf\",", "name, scale in list(colorscales.items()): clip_percent = 0.03 clip_color = (0, 255, 0, 255)", "list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0, clip_color) if scale[-2][0] > 1 - clip_percent:", "2, scale_high[-2][1]) else: scale_high[-2] = (1 - clip_percent, scale_high[-2][1]) colorscales[name + \"_clip_high\"] =", "scale_high[-2][1]) else: scale_high[-2] = (1 - clip_percent, scale_high[-2][1]) colorscales[name + \"_clip_high\"] = scale_high", "i][0], color[1]) for i, color in enumerate(scale) ] colorscales[name + \"_reversed\"] = reversed_scale", "255)), (0.25, (0, 0, 255, 255)), (0.75, (255, 0, 0, 255)), ], \"spectrum\":", "109, 255)), (0.5, (187, 55, 84, 255)), (0.75, (249, 142, 8, 255)), (1.0,", "(0.25, (80, 18, 123, 255)), (0.5, (182, 54, 121, 255)), (0.75, (251, 136,", "the colors maps from the qcodes, context menu of the color bar from", "255)), (0.75, (255, 0, 0, 255)), ], \"spectrum\": [ (1.0, (255, 0, 255,", "len(anglemap_colorlist) color_scale = [ [i / (len_colorlist - 1), \"rgb\" + repr(tuple((int(x *", "now this is just to convert plotly colorscales to pyqtgraph tuples \"\"\" if", "[0.375, \"rgb(252, 78, 42)\"], [0.5, \"rgb(253, 141, 60)\"], [0.625, \"rgb(254, 178, 76)\"], [0.75,", "specification of that part... 
# \"greyclip\": [ # (0.0, (0, 0, 0, 255)),", "\"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"], [0.8784313725490196, \"#addc30\"],", "one_rgba(c): \"\"\" convert a single color value to (r, g, b, a) input", "(1.0, (255, 255, 255, 255)), (0.0, (0, 0, 0, 255)), ], \"yellowy\": [", "\"rgb(51,153,255)\"], [0.2, \"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"],", "\"rgb(35, 139, 69)\"], [0.375, \"rgb(65, 171, 93)\"], [0.5, \"rgb(116, 196, 118)\"], [0.625, \"rgb(161,", "\"rgb(252, 78, 42)\"], [0.5, \"rgb(253, 141, 60)\"], [0.625, \"rgb(254, 178, 76)\"], [0.75, \"rgb(254,", "220)\"], [0.2, \"rgb(245, 195, 157)\"], [0.4, \"rgb(245, 160, 105)\"], [1, \"rgb(178, 10, 28)\"],", "\"#8c564b\", # chestnut brown \"#e377c2\", # raspberry yogurt pink \"#7f7f7f\", # middle gray", "\"rgb(0, 25, 255)\"], [0.375, \"rgb(0, 152, 255)\"], [0.5, \"rgb(44, 255, 150)\"], [0.625, \"rgb(151,", "0, 0, 255)), # ], \"grey\": [(0.0, (0, 0, 0, 255)), (1.0, (255,", "\"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8, 29, 88)\"], [0.125, \"rgb(37, 52, 148)\"], [0.25, \"rgb(34,", "\"rgb(255, 0, 0)\"], ], \"portland\": [ [0, \"rgb(12,51,131)\"], [0.25, \"rgb(10,136,186)\"], [0.5, \"rgb(242,211,56)\"], [0.75,", "15, 255, 255)), (1.0, (255, 255, 255, 255)), ], \"bipolar\": [ (0.0, (0,", "(255, 0, 0, 255)), ], \"spectrum\": [ (1.0, (255, 0, 255, 255)), (0.0,", "\"cyclic\": [ (0.0, (255, 0, 4, 255)), (1.0, (255, 0, 0, 255)), ],", "\"rgb(102,204,255)\"], [0.3, \"rgb(153,204,255)\"], [0.4, \"rgb(204,204,255)\"], [0.5, \"rgb(255,255,255)\"], [0.6, \"rgb(255,204,255)\"], [0.7, \"rgb(255,153,255)\"], [0.8, \"rgb(255,102,204)\"],", "], \"YlOrRd\": [ [0, \"rgb(128, 0, 38)\"], [0.125, \"rgb(189, 0, 38)\"], [0.25, \"rgb(227,", "\"rgb(0,0,0)\"], [0.15, 
\"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\":", "234, 0)\"], [0.875, \"rgb(255, 111, 0)\"], [1, \"rgb(255, 0, 0)\"], ], \"portland\": [", "and \"make_anglemap45_colorlist\" below to get you started. \"\"\" from pycqed.analysis.tools.plotting import make_anglemap45_colorlist #", "\"rgb(0,0,0)\"], [1, \"rgb(255,255,255)\"]], \"YlGnBu\": [ [0, \"rgb(8, 29, 88)\"], [0.125, \"rgb(37, 52, 148)\"],", "217, 155)\"], [0.75, \"rgb(199, 233, 192)\"], [0.875, \"rgb(229, 245, 224)\"], [1, \"rgb(247, 252,", "\"Blues\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(40, 60, 190)\"], [0.5, \"rgb(70, 100,", "\"#424086\"], [0.25098039215686274, \"#3b528b\"], [0.3137254901960784, \"#33638d\"], [0.3764705882352941, \"#2c728e\"], [0.4392156862745098, \"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"],", "], # Scale for non-positive numeric values \"Blues\": [ [0, \"rgb(5, 10, 172)\"],", "# (1.0, (255, 0, 0, 255)), # ], \"grey\": [(0.0, (0, 0, 0,", "[ [0, \"rgb(0, 68, 27)\"], [0.125, \"rgb(0, 109, 44)\"], [0.25, \"rgb(35, 139, 69)\"],", "(0.0, (0, 0, 0, 255)), (0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941, (255, 255,", "[0.125, \"rgb(0, 109, 44)\"], [0.25, \"rgb(35, 139, 69)\"], [0.375, \"rgb(65, 171, 93)\"], [0.5,", "(1.0, (255, 0, 0, 255)), ], # this is a hsv, didn't patch", "], \"jet\": [ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875, \"rgb(250,0,0)\"],", "], \"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"],", "[0.4, \"rgb(245, 160, 105)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for non-positive", "\"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\": [ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"], [0.12549019607843137,", "\"rgb(237, 248, 
217)\"], [1, \"rgb(255, 255, 217)\"], ], \"Greens\": [ [0, \"rgb(0, 68,", "16), int(c[3:5], 16), int(c[5:7], 16), 255) if c[:4] == \"rgb(\": return tuple(map(int, c[4:-1].split(\",\")))", "Matplotlib 2.0 \"viridis\": [ (0.0, (68, 1, 84, 255)), (0.25, (58, 82, 139,", "[0.625, \"rgb(161, 217, 155)\"], [0.75, \"rgb(199, 233, 192)\"], [0.875, \"rgb(229, 245, 224)\"], [1,", "Mofied by <NAME> for Measurement Control It modules makes available all the colors", "[ [0, \"rgb(220, 220, 220)\"], [0.2, \"rgb(245, 195, 157)\"], [0.4, \"rgb(245, 160, 105)\"],", "(0.25, (58, 82, 139, 255)), (0.5, (32, 144, 140, 255)), (0.75, (94, 201,", "but for now this is just to convert plotly colorscales to pyqtgraph tuples", "for now this is just to convert plotly colorscales to pyqtgraph tuples \"\"\"", "255)), ], \"magma\": [ (0.0, (0, 0, 3, 255)), (0.25, (80, 18, 123,", "[0, \"rgb(0, 68, 27)\"], [0.125, \"rgb(0, 109, 44)\"], [0.25, \"rgb(35, 139, 69)\"], [0.375,", "], \"flame\": [ (0.2, (7, 0, 220, 255)), (0.5, (236, 0, 134, 255)),", "available all the colors maps from the qcodes, context menu of the color", "Scale for non-negative numeric values \"Reds\": [ [0, \"rgb(220, 220, 220)\"], [0.2, \"rgb(245,", "(248, 149, 64, 255)), (1.0, (239, 248, 33, 255)), ], \"magma\": [ (0.0,", "== 7: return (int(c[1:3], 16), int(c[3:5], 16), int(c[5:7], 16), 255) if c[:4] ==", "255)\"], [0.375, \"rgb(0, 152, 255)\"], [0.5, \"rgb(44, 255, 150)\"], [0.625, \"rgb(151, 255, 0)\"],", "(scale[last_idx - i][0], color[1]) for i, color in enumerate(scale) ] colorscales[name + \"_reversed\"]", "the color bar from pyqtgraph, the circular colormap created by me (Victo), and", "\"plasma\": [ (0.0, (12, 7, 134, 255)), (0.25, (126, 3, 167, 255)), (0.5,", "be an rgb string 'rgb(r,g,b)', '#rrggbb' if we decide we want more we", "\"rgb(227, 26, 28)\"], [0.375, \"rgb(252, 78, 42)\"], [0.5, \"rgb(253, 141, 60)\"], [0.625, \"rgb(254,", "[0.625, \"rgb(254, 178, 76)\"], [0.75, \"rgb(254, 217, 118)\"], [0.875, \"rgb(255, 237, 
160)\"], [1,", "\"rgb(217,30,30)\"], ], \"jet\": [ [0, \"rgb(0,0,131)\"], [0.125, \"rgb(0,60,170)\"], [0.375, \"rgb(5,255,255)\"], [0.625, \"rgb(255,255,0)\"], [0.875,", "circular colormap created by me (Victo), and the reversed version of all of", "c[4:-1].split(\",\"))) + (255,) raise ValueError(\"one_rgba only supports rgb(r,g,b) and #rrggbb colors\") colorscales =", "# muted purple \"#8c564b\", # chestnut brown \"#e377c2\", # raspberry yogurt pink \"#7f7f7f\",", "non-negative numeric values \"Reds\": [ [0, \"rgb(220, 220, 220)\"], [0.2, \"rgb(245, 195, 157)\"],", "b, a) input can be an rgb string 'rgb(r,g,b)', '#rrggbb' if we decide", "if scale[1][0] < clip_percent: scale_low[1] = ((scale[1][0] + scale[0][0]) / 2, scale_low[1][1]) else:", "original qcodes.plots.colors Mofied by <NAME> for Measurement Control It modules makes available all", "Measurement Control It modules makes available all the colors maps from the qcodes,", "gray \"#bcbd22\", # curry yellow-green \"#17becf\", # blue-teal ] colorscales_raw = { \"Greys\":", "pyqtgraph, the circular colormap created by me (Victo), and the reversed version of", "= (0.0, clip_color) if scale[1][0] < clip_percent: scale_low[1] = ((scale[1][0] + scale[0][0]) /", "(0.0, (0, 255, 255, 255)), (1.0, (255, 255, 0, 255)), (0.5, (0, 0,", "clip_color) if scale[1][0] < clip_percent: scale_low[1] = ((scale[1][0] + scale[0][0]) / 2, scale_low[1][1])", "132)\"], [0.7, \"rgb(230, 145, 90)\"], [1, \"rgb(178, 10, 28)\"], ], # Scale for", "], \"electric\": [ [0, \"rgb(0,0,0)\"], [0.15, \"rgb(30,0,100)\"], [0.4, \"rgb(120,0,100)\"], [0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"],", "pyqtgraph tuples \"\"\" if c[0] == \"#\" and len(c) == 7: return (int(c[1:3],", "0)\"], [0.875, \"rgb(255, 111, 0)\"], [1, \"rgb(255, 0, 0)\"], ], \"portland\": [ [0,", "numeric values \"Reds\": [ [0, \"rgb(220, 220, 220)\"], [0.2, \"rgb(245, 195, 157)\"], [0.4,", "\"YlGnBu\": [ [0, \"rgb(8, 29, 88)\"], [0.125, \"rgb(37, 52, 148)\"], [0.25, 
\"rgb(34, 94,", "[ (0.0, (68, 1, 84, 255)), (0.25, (58, 82, 139, 255)), (0.5, (32,", "255, 255, 255)), # (1.0, (255, 0, 0, 255)), # ], \"grey\": [(0.0,", "255)), (1, (255, 255, 255, 255)), (0, (0, 0, 0, 255)), ], \"flame\":", "245)\"], ], \"YlOrRd\": [ [0, \"rgb(128, 0, 38)\"], [0.125, \"rgb(189, 0, 38)\"], [0.25,", "= ((scale[1][0] + scale[0][0]) / 2, scale_low[1][1]) else: scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name", "(0, 0, 0, 255)), ], \"flame\": [ (0.2, (7, 0, 220, 255)), (0.5,", "{} for scale_name, scale in colorscales_raw.items(): colorscales[scale_name] = make_rgba(scale) for scale_name, scale in", "(0.75, (249, 142, 8, 255)), (1.0, (252, 254, 164, 255)), ], \"plasma\": [", "with cliping at green for name, scale in list(colorscales.items()): clip_percent = 0.03 clip_color", "[0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\": [ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"],", "we can make more, but for now this is just to convert plotly", "\"#26828e\"], [0.5019607843137255, \"#21918c\"], [0.5647058823529412, \"#1fa088\"], [0.6274509803921569, \"#28ae80\"], [0.6901960784313725, \"#3fbc73\"], [0.7529411764705882, \"#5ec962\"], [0.8156862745098039, \"#84d44b\"],", "i, color in enumerate(scale) ] colorscales[name + \"_reversed\"] = reversed_scale # Generate also", "= scale_low scale_high = list(scale) scale_high.insert(-1, scale[-1]) scale_high[-1] = (1.0, clip_color) if scale[-2][0]", "} def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale = [", "\"rgb(0, 152, 255)\"], [0.5, \"rgb(44, 255, 150)\"], [0.625, \"rgb(151, 255, 0)\"], [0.75, \"rgb(255,", "255)), (0.5, (187, 55, 84, 255)), (0.75, (249, 142, 8, 255)), (1.0, (252,", "255)), (1.0, (251, 252, 191, 255)), ], } def make_qcodes_anglemap45(): anglemap_colorlist = make_anglemap45_colorlist(N=9,", "[0.75, \"rgb(199, 233, 180)\"], [0.875, \"rgb(237, 
248, 217)\"], [1, \"rgb(255, 255, 217)\"], ],", "\"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7,", "233, 192)\"], [0.875, \"rgb(229, 245, 224)\"], [1, \"rgb(247, 252, 245)\"], ], \"YlOrRd\": [", "255)), (0.5, (32, 144, 140, 255)), (0.75, (94, 201, 97, 255)), (1.0, (253,", "], \"hot\": [ [0, \"rgb(0,0,0)\"], [0.3, \"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\":", "[0.6, \"rgb(160,90,0)\"], [0.8, \"rgb(230,200,0)\"], [1, \"rgb(255,250,220)\"], ], \"viridis\": [ [0, \"#440154\"], [0.06274509803921569, \"#48186a\"],", "scale for name, scale in list(colorscales.items()): last_idx = len(scale) - 1 reversed_scale =", "modules makes available all the colors maps from the qcodes, context menu of", "of the original qcodes.plots.colors Mofied by <NAME> for Measurement Control It modules makes", "\"rgb(255,0,0)\"]], # modified RdBu based on # www.sandia.gov/~kmorel/documents/ColorMaps/ColorMapsExpanded.pdf \"RdBu\": [ [0, \"rgb(5, 10,", "((scale[1][0] + scale[0][0]) / 2, scale_low[1][1]) else: scale_low[1] = (clip_percent, scale_low[1][1]) colorscales[name +", "for v, c in colorscale] def one_rgba(c): \"\"\" convert a single color value", "\"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"], [0.4,", "color_scale = [ [i / (len_colorlist - 1), \"rgb\" + repr(tuple((int(x * 255)", "255)), (0.75, (249, 142, 8, 255)), (1.0, (252, 254, 164, 255)), ], \"plasma\":", "[ (0.0, (255, 0, 4, 255)), (1.0, (255, 0, 0, 255)), ], #", "make_anglemap45_colorlist(N=9, use_hpl=False) len_colorlist = len(anglemap_colorlist) color_scale = [ [i / (len_colorlist - 1),", "= scale for name, scale in list(colorscales.items()): last_idx = len(scale) - 1 reversed_scale", "[0.875, \"rgb(255, 111, 0)\"], [1, \"rgb(255, 0, 0)\"], ], \"portland\": [ [0, \"rgb(12,51,131)\"],", "[0.3, 
\"rgb(230,0,0)\"], [0.6, \"rgb(255,210,0)\"], [1, \"rgb(255,255,255)\"], ], \"blackbody\": [ [0, \"rgb(0,0,0)\"], [0.2, \"rgb(230,0,0)\"],", "\"yellowy\": [ (0.0, (0, 0, 0, 255)), (0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941,", "color in enumerate(scale) ] colorscales[name + \"_reversed\"] = reversed_scale # Generate also all", "\"rgb(160,200,255)\"], ], \"earth\": [ [0, \"rgb(0,0,130)\"], [0.1, \"rgb(0,180,180)\"], [0.2, \"rgb(40,210,40)\"], [0.4, \"rgb(230,230,50)\"], [0.6,", "0, 255)), (0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247,", "scale in list(colorscales.items()): last_idx = len(scale) - 1 reversed_scale = [ (scale[last_idx -", "\"rgb(8, 29, 88)\"], [0.125, \"rgb(37, 52, 148)\"], [0.25, \"rgb(34, 94, 168)\"], [0.375, \"rgb(29,", "[ [0, \"rgb(150,0,90)\"], [0.125, \"rgb(0, 0, 200)\"], [0.25, \"rgb(0, 25, 255)\"], [0.375, \"rgb(0,", "[0.625, \"rgb(151, 255, 0)\"], [0.75, \"rgb(255, 234, 0)\"], [0.875, \"rgb(255, 111, 0)\"], [1,", "return (int(c[1:3], 16), int(c[3:5], 16), int(c[5:7], 16), 255) if c[:4] == \"rgb(\": return", "[0.5, \"rgb(44, 255, 150)\"], [0.625, \"rgb(151, 255, 0)\"], [0.75, \"rgb(255, 234, 0)\"], [0.875,", "(0.0, clip_color) if scale[1][0] < clip_percent: scale_low[1] = ((scale[1][0] + scale[0][0]) / 2,", "[0.125, \"rgb(37, 52, 148)\"], [0.25, \"rgb(34, 94, 168)\"], [0.375, \"rgb(29, 145, 192)\"], [0.5,", "255))], # Perceptually uniform sequential colormaps from Matplotlib 2.0 \"viridis\": [ (0.0, (68,", "[0.5, \"rgb(190,190,190)\"], [0.6, \"rgb(220, 170, 132)\"], [0.7, \"rgb(230, 145, 90)\"], [1, \"rgb(178, 10,", "\"rgb(230,0,0)\"], [0.4, \"rgb(230,210,0)\"], [0.7, \"rgb(255,255,255)\"], [1, \"rgb(160,200,255)\"], ], \"earth\": [ [0, \"rgb(0,0,130)\"], [0.1,", "int(c[3:5], 16), int(c[5:7], 16), 255) if c[:4] == \"rgb(\": return tuple(map(int, c[4:-1].split(\",\"))) +", "255)), ], \"flame\": [ (0.2, (7, 0, 220, 255)), (0.5, (236, 0, 134,", "Scale for non-positive numeric 
values \"Blues\": [ [0, \"rgb(5, 10, 172)\"], [0.35, \"rgb(40,", "context menu of the color bar from pyqtgraph, the circular colormap created by", "# Perceptually uniform sequential colormaps from Matplotlib 2.0 \"viridis\": [ (0.0, (68, 1,", "and #rrggbb colors\") colorscales = {} for scale_name, scale in colorscales_raw.items(): colorscales[scale_name] =", "[0.875, \"rgb(255, 237, 160)\"], [1, \"rgb(255, 255, 204)\"], ], \"bluered\": [[0, \"rgb(0,0,255)\"], [1,", "\"#e377c2\", # raspberry yogurt pink \"#7f7f7f\", # middle gray \"#bcbd22\", # curry yellow-green", "[0, \"rgb(128, 0, 38)\"], [0.125, \"rgb(189, 0, 38)\"], [0.25, \"rgb(227, 26, 28)\"], [0.375," ]
[ "import ProtocolTypeRouter from baserow.ws.routers import websocket_router from django.core.asgi import get_asgi_application django.setup() django_asgi_app =", "import django from channels.routing import ProtocolTypeRouter from baserow.ws.routers import websocket_router from django.core.asgi import", "websocket_router from django.core.asgi import get_asgi_application django.setup() django_asgi_app = get_asgi_application() application = ProtocolTypeRouter( {\"http\":", "from django.core.asgi import get_asgi_application django.setup() django_asgi_app = get_asgi_application() application = ProtocolTypeRouter( {\"http\": django_asgi_app,", "ProtocolTypeRouter from baserow.ws.routers import websocket_router from django.core.asgi import get_asgi_application django.setup() django_asgi_app = get_asgi_application()", "baserow.ws.routers import websocket_router from django.core.asgi import get_asgi_application django.setup() django_asgi_app = get_asgi_application() application =", "django.core.asgi import get_asgi_application django.setup() django_asgi_app = get_asgi_application() application = ProtocolTypeRouter( {\"http\": django_asgi_app, \"websocket\":", "channels.routing import ProtocolTypeRouter from baserow.ws.routers import websocket_router from django.core.asgi import get_asgi_application django.setup() django_asgi_app", "import get_asgi_application django.setup() django_asgi_app = get_asgi_application() application = ProtocolTypeRouter( {\"http\": django_asgi_app, \"websocket\": websocket_router}", "get_asgi_application django.setup() django_asgi_app = get_asgi_application() application = ProtocolTypeRouter( {\"http\": django_asgi_app, \"websocket\": websocket_router} )", "import websocket_router from django.core.asgi import get_asgi_application django.setup() django_asgi_app = get_asgi_application() application = ProtocolTypeRouter(", "from baserow.ws.routers import websocket_router from django.core.asgi import get_asgi_application django.setup() django_asgi_app 
= get_asgi_application() application", "from channels.routing import ProtocolTypeRouter from baserow.ws.routers import websocket_router from django.core.asgi import get_asgi_application django.setup()", "django from channels.routing import ProtocolTypeRouter from baserow.ws.routers import websocket_router from django.core.asgi import get_asgi_application" ]
[ "= supportClient.refresh_trusted_advisor_check( checkId=checkId ) logger.info(sanitize_json(response)) return response def checkAssumeRoleFailure(error): if \"(AccessDenied) when calling", "this file. This file is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "an \"AS IS\" BASIS, WITHOUT WARRANTIES # # OR CONDITIONS OF ANY KIND,", "supportClient, event['CheckId']) logger.info(\"Append the Refresh Status '\"+response['status']['status']+\"' to response.\" + \" This will", "Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # #", "IS\" BASIS, WITHOUT WARRANTIES # # OR CONDITIONS OF ANY KIND, express or", "client error %s\" % e) raise AWSTrustedAdvisorExplorerGenericException(e) except Exception as f: checkAssumeRoleFailure(str(f)) f=sanitize_string(f)", "Refresh Status '\"+response['status']['status']+\"' to response.\" + \" This will be consumed by downstream", "event['CheckId']) logger.info(\"Append the Refresh Status '\"+response['status']['status']+\"' to response.\" + \" This will be", "by downstream Lambda\") event[\"RefreshStatus\"] = response['status']['status'] return event except ClientError as e: checkAssumeRoleFailure(str(e))", "return y def refresh_trusted_advisor_checks(supportClient,checkId): logger.info('Refreshing Trusted Advisor Check:'+checkId) response = supportClient.refresh_trusted_advisor_check( checkId=checkId )", "ClientError class AWSTrustedAdvisorExplorerGenericException(Exception): pass logger = logging.getLogger() if \"LOG_LEVEL\" in os.environ: numeric_level =", "response = supportClient.refresh_trusted_advisor_check( checkId=checkId ) logger.info(sanitize_json(response)) return response def checkAssumeRoleFailure(error): if \"(AccessDenied) when", "os.environ['MASK_PII'].lower() == 'true': pattern=re.compile('\\d{12}') y = re.sub(pattern,lambda match: ((match.group()[1])+'XXXXXXX'+(match.group()[-4:])), y) return y def", "%s' % loglevel) logger.setLevel(level=numeric_level) def sanitize_json(x): d = 
x.copy() if os.environ['MASK_PII'].lower() == 'true':", "os.environ['MASK_PII'].lower() == 'true': for k, v in d.items(): if 'AccountId' in k: d[k]", "v[:3]+'-MASKED-'+v[-3:] return d def sanitize_string(x): y = str(x) if os.environ['MASK_PII'].lower() == 'true': pattern=re.compile('\\d{12}')", "response['status']['status'] return event except ClientError as e: checkAssumeRoleFailure(str(e)) e=sanitize_string(e) logger.error(\"Unexpected client error %s\"", "sanitize_json(x): d = x.copy() if os.environ['MASK_PII'].lower() == 'true': for k, v in d.items():", "response = refresh_trusted_advisor_checks( supportClient, event['CheckId']) logger.info(\"Append the Refresh Status '\"+response['status']['status']+\"' to response.\" +", "WITHOUT WARRANTIES # # OR CONDITIONS OF ANY KIND, express or implied. See", "###################################################################################################################### # Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. #", "y = re.sub(pattern,lambda match: ((match.group()[1])+'XXXXXXX'+(match.group()[-4:])), y) return y def refresh_trusted_advisor_checks(supportClient,checkId): logger.info('Refreshing Trusted Advisor", "# # OR CONDITIONS OF ANY KIND, express or implied. 
See the License", "located at # # # # http://www.apache.org/licenses/ # # # # or in", "# # # http://www.apache.org/licenses/ # # # # or in the \"license\" file", "http://www.apache.org/licenses/ # # # # or in the \"license\" file accompanying this file.", "Account def assumeRole(accountId): logger.info('Variables passed to assumeRole(): '+sanitize_string(accountId)) roleArn=\"arn:aws:iam::\"+str(accountId)+\":role/\"+os.environ['IAMRoleName'] #STS assume role call", "v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] return d def sanitize_string(x): y", "re.sub(pattern,lambda match: ((match.group()[1])+'XXXXXXX'+(match.group()[-4:])), y) return y def refresh_trusted_advisor_checks(supportClient,checkId): logger.info('Refreshing Trusted Advisor Check:'+checkId) response", "return response def checkAssumeRoleFailure(error): if \"(AccessDenied) when calling the AssumeRole operation\" in error:", "client = boto3.client('s3') client.put_object(ACL='bucket-owner-full-control',StorageClass='STANDARD',Body=error, Bucket=os.environ['S3BucketName'],Key=key_name) return #Assume Role in Child Account def assumeRole(accountId):", "credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\", aws_access_key_id = roleCredentials['Credentials']['AccessKeyId'], aws_secret_access_key = roleCredentials['Credentials']['SecretAccessKey'], aws_session_token=roleCredentials['Credentials']['SessionToken']) response = refresh_trusted_advisor_checks( supportClient,", "This will be consumed by downstream Lambda\") event[\"RefreshStatus\"] = response['status']['status'] return event except", "getattr(logging, os.environ['LOG_LEVEL'].upper(), None) if not isinstance(numeric_level, int): raise ValueError('Invalid log level: %s' %", "numeric_level = getattr(logging, os.environ['LOG_LEVEL'].upper(), None) if not isinstance(numeric_level, int): raise ValueError('Invalid log level:", "return #Assume Role in Child Account def assumeRole(accountId): 
logger.info('Variables passed to assumeRole(): '+sanitize_string(accountId))", "d.items(): if 'AccountId' in k: d[k] = sanitize_string(v) if 'AccountName' in k: d[k]", "file is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES # # OR", "AssumeRole operation\" in error: pattern=re.compile('.*iam::(\\d{12}):.*$') match=pattern.match(error) logger.info('Assume Role Error for Account:'+match.group(1)) if match", "= x.copy() if os.environ['MASK_PII'].lower() == 'true': for k, v in d.items(): if 'AccountId'", "and limitations under the License. # ###################################################################################################################### import re,boto3,logging,os from datetime import date", "BASIS, WITHOUT WARRANTIES # # OR CONDITIONS OF ANY KIND, express or implied.", "Role in Child Account def assumeRole(accountId): logger.info('Variables passed to assumeRole(): '+sanitize_string(accountId)) roleArn=\"arn:aws:iam::\"+str(accountId)+\":role/\"+os.environ['IAMRoleName'] #STS", "# # # # http://www.apache.org/licenses/ # # # # or in the \"license\"", "event[\"RefreshStatus\"] = response['status']['status'] return event except ClientError as e: checkAssumeRoleFailure(str(e)) e=sanitize_string(e) logger.error(\"Unexpected client", "'AccountId' in k: d[k] = sanitize_string(v) if 'AccountName' in k: d[k] = v[:3]+'-MASKED-'+v[-3:]", "= sanitize_string(v) if 'AccountName' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in k:", "client.put_object(ACL='bucket-owner-full-control',StorageClass='STANDARD',Body=error, Bucket=os.environ['S3BucketName'],Key=key_name) return #Assume Role in Child Account def assumeRole(accountId): logger.info('Variables passed to", "Child Account def assumeRole(accountId): logger.info('Variables passed to assumeRole(): '+sanitize_string(accountId)) roleArn=\"arn:aws:iam::\"+str(accountId)+\":role/\"+os.environ['IAMRoleName'] #STS assume role", "the License for the specific language governing permissions # # and 
limitations under", "passed to assumeRole(): '+sanitize_string(accountId)) roleArn=\"arn:aws:iam::\"+str(accountId)+\":role/\"+os.environ['IAMRoleName'] #STS assume role call stsClient = boto3.client('sts') roleCredentials", "re,boto3,logging,os from datetime import date from botocore.exceptions import ClientError class AWSTrustedAdvisorExplorerGenericException(Exception): pass logger", "e=sanitize_string(e) logger.error(\"Unexpected client error %s\" % e) raise AWSTrustedAdvisorExplorerGenericException(e) except Exception as f:", "using the temporary credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\", aws_access_key_id = roleCredentials['Credentials']['AccessKeyId'], aws_secret_access_key = roleCredentials['Credentials']['SecretAccessKey'], aws_session_token=roleCredentials['Credentials']['SessionToken']) response", "'AccountEmail' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] return d def sanitize_string(x): y = str(x)", "logger.info('Assume Role Error for Account:'+match.group(1)) if match != None: key_name='Logs/AssumeRoleFailure/'+ str(date.today().year)+ '/'+str(date.today().month)+'/'+str(date.today().day)+'/'+str(match.group(1))+'.log' client", "A copy of the License is located at # # # # http://www.apache.org/licenses/", "assumeRole(accountId): logger.info('Variables passed to assumeRole(): '+sanitize_string(accountId)) roleArn=\"arn:aws:iam::\"+str(accountId)+\":role/\"+os.environ['IAMRoleName'] #STS assume role call stsClient =", "(the \"License\"). You may not use this file except in compliance # #", "License Version 2.0 (the \"License\"). 
You may not use this file except in", "((match.group()[1])+'XXXXXXX'+(match.group()[-4:])), y) return y def refresh_trusted_advisor_checks(supportClient,checkId): logger.info('Refreshing Trusted Advisor Check:'+checkId) response = supportClient.refresh_trusted_advisor_check(", "event except ClientError as e: checkAssumeRoleFailure(str(e)) e=sanitize_string(e) logger.error(\"Unexpected client error %s\" % e)", "except Exception as f: checkAssumeRoleFailure(str(f)) f=sanitize_string(f) logger.error(\"Unexpected exception: %s\" % f) raise AWSTrustedAdvisorExplorerGenericException(f)", "\" This will be consumed by downstream Lambda\") event[\"RefreshStatus\"] = response['status']['status'] return event", "file accompanying this file. This file is distributed on an \"AS IS\" BASIS,", "Version 2.0 (the \"License\"). You may not use this file except in compliance", "pass logger = logging.getLogger() if \"LOG_LEVEL\" in os.environ: numeric_level = getattr(logging, os.environ['LOG_LEVEL'].upper(), None)", "logger = logging.getLogger() if \"LOG_LEVEL\" in os.environ: numeric_level = getattr(logging, os.environ['LOG_LEVEL'].upper(), None) if", "k: d[k] = sanitize_string(v) if 'AccountName' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail'", "def sanitize_string(x): y = str(x) if os.environ['MASK_PII'].lower() == 'true': pattern=re.compile('\\d{12}') y = re.sub(pattern,lambda", "d[k] = v[:3]+'-MASKED-'+v[-3:] return d def sanitize_string(x): y = str(x) if os.environ['MASK_PII'].lower() ==", "# # # # or in the \"license\" file accompanying this file. 
This", "call stsClient = boto3.client('sts') roleCredentials = stsClient.assume_role(RoleArn=roleArn, RoleSessionName=\"AWSTrustedAdvisorExplorerAssumeRole\") return roleCredentials def lambda_handler(event, context):", "date from botocore.exceptions import ClientError class AWSTrustedAdvisorExplorerGenericException(Exception): pass logger = logging.getLogger() if \"LOG_LEVEL\"", "is located at # # # # http://www.apache.org/licenses/ # # # # or", "logging.getLogger() if \"LOG_LEVEL\" in os.environ: numeric_level = getattr(logging, os.environ['LOG_LEVEL'].upper(), None) if not isinstance(numeric_level,", "checkAssumeRoleFailure(error): if \"(AccessDenied) when calling the AssumeRole operation\" in error: pattern=re.compile('.*iam::(\\d{12}):.*$') match=pattern.match(error) logger.info('Assume", "Role in child account\") roleCredentials=assumeRole(event['AccountId']) logger.info(\"Create boto3 support client using the temporary credentials\")", "if os.environ['MASK_PII'].lower() == 'true': pattern=re.compile('\\d{12}') y = re.sub(pattern,lambda match: ((match.group()[1])+'XXXXXXX'+(match.group()[-4:])), y) return y", "KIND, express or implied. 
See the License for the specific language governing permissions", "sanitize_string(v) if 'AccountName' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in k: d[k]", "e) raise AWSTrustedAdvisorExplorerGenericException(e) except Exception as f: checkAssumeRoleFailure(str(f)) f=sanitize_string(f) logger.error(\"Unexpected exception: %s\" %", "class AWSTrustedAdvisorExplorerGenericException(Exception): pass logger = logging.getLogger() if \"LOG_LEVEL\" in os.environ: numeric_level = getattr(logging,", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES # # OR CONDITIONS OF", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES # # OR CONDITIONS", "logger.info(\"Append the Refresh Status '\"+response['status']['status']+\"' to response.\" + \" This will be consumed", "isinstance(numeric_level, int): raise ValueError('Invalid log level: %s' % loglevel) logger.setLevel(level=numeric_level) def sanitize_json(x): d", "in the \"license\" file accompanying this file. This file is distributed on an", "roleCredentials=assumeRole(event['AccountId']) logger.info(\"Create boto3 support client using the temporary credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\", aws_access_key_id = roleCredentials['Credentials']['AccessKeyId'],", "account\") roleCredentials=assumeRole(event['AccountId']) logger.info(\"Create boto3 support client using the temporary credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\", aws_access_key_id =", "d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] return d def", "error: pattern=re.compile('.*iam::(\\d{12}):.*$') match=pattern.match(error) logger.info('Assume Role Error for Account:'+match.group(1)) if match != None: key_name='Logs/AssumeRoleFailure/'+", "This file is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES # #", "= v[:3]+'-MASKED-'+v[-3:] return d def sanitize_string(x): y = str(x) if os.environ['MASK_PII'].lower() == 'true':", "not use 
this file except in compliance # # with the License. A", "str(date.today().year)+ '/'+str(date.today().month)+'/'+str(date.today().day)+'/'+str(match.group(1))+'.log' client = boto3.client('s3') client.put_object(ACL='bucket-owner-full-control',StorageClass='STANDARD',Body=error, Bucket=os.environ['S3BucketName'],Key=key_name) return #Assume Role in Child Account", "the Refresh Status '\"+response['status']['status']+\"' to response.\" + \" This will be consumed by", "affiliates. All Rights Reserved. # # # # Licensed under the Apache License", "if \"(AccessDenied) when calling the AssumeRole operation\" in error: pattern=re.compile('.*iam::(\\d{12}):.*$') match=pattern.match(error) logger.info('Assume Role", "'true': pattern=re.compile('\\d{12}') y = re.sub(pattern,lambda match: ((match.group()[1])+'XXXXXXX'+(match.group()[-4:])), y) return y def refresh_trusted_advisor_checks(supportClient,checkId): logger.info('Refreshing", "def sanitize_json(x): d = x.copy() if os.environ['MASK_PII'].lower() == 'true': for k, v in", "Account:'+match.group(1)) if match != None: key_name='Logs/AssumeRoleFailure/'+ str(date.today().year)+ '/'+str(date.today().month)+'/'+str(date.today().day)+'/'+str(match.group(1))+'.log' client = boto3.client('s3') client.put_object(ACL='bucket-owner-full-control',StorageClass='STANDARD',Body=error, Bucket=os.environ['S3BucketName'],Key=key_name)", "# ###################################################################################################################### import re,boto3,logging,os from datetime import date from botocore.exceptions import ClientError class", "specific language governing permissions # # and limitations under the License. 
# ######################################################################################################################", "'AccountName' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in k: d[k] = v[:3]+'-MASKED-'+v[-3:]", "= logging.getLogger() if \"LOG_LEVEL\" in os.environ: numeric_level = getattr(logging, os.environ['LOG_LEVEL'].upper(), None) if not", "2.0 (the \"License\"). You may not use this file except in compliance #", "== 'true': for k, v in d.items(): if 'AccountId' in k: d[k] =", "in child account\") roleCredentials=assumeRole(event['AccountId']) logger.info(\"Create boto3 support client using the temporary credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\",", "at # # # # http://www.apache.org/licenses/ # # # # or in the", "botocore.exceptions import ClientError class AWSTrustedAdvisorExplorerGenericException(Exception): pass logger = logging.getLogger() if \"LOG_LEVEL\" in os.environ:", "the \"license\" file accompanying this file. This file is distributed on an \"AS", "for k, v in d.items(): if 'AccountId' in k: d[k] = sanitize_string(v) if", "!= None: key_name='Logs/AssumeRoleFailure/'+ str(date.today().year)+ '/'+str(date.today().month)+'/'+str(date.today().day)+'/'+str(match.group(1))+'.log' client = boto3.client('s3') client.put_object(ACL='bucket-owner-full-control',StorageClass='STANDARD',Body=error, Bucket=os.environ['S3BucketName'],Key=key_name) return #Assume Role", "except ClientError as e: checkAssumeRoleFailure(str(e)) e=sanitize_string(e) logger.error(\"Unexpected client error %s\" % e) raise", "x.copy() if os.environ['MASK_PII'].lower() == 'true': for k, v in d.items(): if 'AccountId' in", "def checkAssumeRoleFailure(error): if \"(AccessDenied) when calling the AssumeRole operation\" in error: pattern=re.compile('.*iam::(\\d{12}):.*$') match=pattern.match(error)", "boto3 support client using the temporary credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\", 
aws_access_key_id = roleCredentials['Credentials']['AccessKeyId'], aws_secret_access_key =", "from botocore.exceptions import ClientError class AWSTrustedAdvisorExplorerGenericException(Exception): pass logger = logging.getLogger() if \"LOG_LEVEL\" in", "k, v in d.items(): if 'AccountId' in k: d[k] = sanitize_string(v) if 'AccountName'", "response.\" + \" This will be consumed by downstream Lambda\") event[\"RefreshStatus\"] = response['status']['status']", "AWSTrustedAdvisorExplorerGenericException(e) except Exception as f: checkAssumeRoleFailure(str(f)) f=sanitize_string(f) logger.error(\"Unexpected exception: %s\" % f) raise", "% e) raise AWSTrustedAdvisorExplorerGenericException(e) except Exception as f: checkAssumeRoleFailure(str(f)) f=sanitize_string(f) logger.error(\"Unexpected exception: %s\"", "role call stsClient = boto3.client('sts') roleCredentials = stsClient.assume_role(RoleArn=roleArn, RoleSessionName=\"AWSTrustedAdvisorExplorerAssumeRole\") return roleCredentials def lambda_handler(event,", "# with the License. A copy of the License is located at #", "License is located at # # # # http://www.apache.org/licenses/ # # # #", "%s\" % e) raise AWSTrustedAdvisorExplorerGenericException(e) except Exception as f: checkAssumeRoleFailure(str(f)) f=sanitize_string(f) logger.error(\"Unexpected exception:", "to response.\" + \" This will be consumed by downstream Lambda\") event[\"RefreshStatus\"] =", "if \"LOG_LEVEL\" in os.environ: numeric_level = getattr(logging, os.environ['LOG_LEVEL'].upper(), None) if not isinstance(numeric_level, int):", "\"License\"). 
You may not use this file except in compliance # # with", "support client using the temporary credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\", aws_access_key_id = roleCredentials['Credentials']['AccessKeyId'], aws_secret_access_key = roleCredentials['Credentials']['SecretAccessKey'],", "temporary credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\", aws_access_key_id = roleCredentials['Credentials']['AccessKeyId'], aws_secret_access_key = roleCredentials['Credentials']['SecretAccessKey'], aws_session_token=roleCredentials['Credentials']['SessionToken']) response = refresh_trusted_advisor_checks(", "the temporary credentials\") supportClient=boto3.client(\"support\",region_name=\"us-east-1\", aws_access_key_id = roleCredentials['Credentials']['AccessKeyId'], aws_secret_access_key = roleCredentials['Credentials']['SecretAccessKey'], aws_session_token=roleCredentials['Credentials']['SessionToken']) response =", "in k: d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] return", "Check:'+checkId) response = supportClient.refresh_trusted_advisor_check( checkId=checkId ) logger.info(sanitize_json(response)) return response def checkAssumeRoleFailure(error): if \"(AccessDenied)", "context): try: logger.info(sanitize_json(event)) logger.info(\"Assume Role in child account\") roleCredentials=assumeRole(event['AccountId']) logger.info(\"Create boto3 support client", "2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# # # #", "may not use this file except in compliance # # with the License.", "in k: d[k] = sanitize_string(v) if 'AccountName' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] if", "aws_secret_access_key = roleCredentials['Credentials']['SecretAccessKey'], aws_session_token=roleCredentials['Credentials']['SessionToken']) response = refresh_trusted_advisor_checks( supportClient, event['CheckId']) logger.info(\"Append the Refresh Status", "if 'AccountName' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in k: d[k] =", "the License. # ###################################################################################################################### import re,boto3,logging,os from datetime import date from botocore.exceptions import", "match=pattern.match(error) logger.info('Assume Role Error for Account:'+match.group(1)) if match != None: key_name='Logs/AssumeRoleFailure/'+ str(date.today().year)+ '/'+str(date.today().month)+'/'+str(date.today().day)+'/'+str(match.group(1))+'.log'", "\"license\" file accompanying this file. This file is distributed on an \"AS IS\"", "Licensed under the Apache License Version 2.0 (the \"License\"). 
You may not use", "if os.environ['MASK_PII'].lower() == 'true': for k, v in d.items(): if 'AccountId' in k:", "the AssumeRole operation\" in error: pattern=re.compile('.*iam::(\\d{12}):.*$') match=pattern.match(error) logger.info('Assume Role Error for Account:'+match.group(1)) if", "+ \" This will be consumed by downstream Lambda\") event[\"RefreshStatus\"] = response['status']['status'] return", "supportClient=boto3.client(\"support\",region_name=\"us-east-1\", aws_access_key_id = roleCredentials['Credentials']['AccessKeyId'], aws_secret_access_key = roleCredentials['Credentials']['SecretAccessKey'], aws_session_token=roleCredentials['Credentials']['SessionToken']) response = refresh_trusted_advisor_checks( supportClient, event['CheckId'])", "d[k] = sanitize_string(v) if 'AccountName' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in", "# # or in the \"license\" file accompanying this file. This file is", "match: ((match.group()[1])+'XXXXXXX'+(match.group()[-4:])), y) return y def refresh_trusted_advisor_checks(supportClient,checkId): logger.info('Refreshing Trusted Advisor Check:'+checkId) response =", "logger.info('Variables passed to assumeRole(): '+sanitize_string(accountId)) roleArn=\"arn:aws:iam::\"+str(accountId)+\":role/\"+os.environ['IAMRoleName'] #STS assume role call stsClient = boto3.client('sts')", "'true': for k, v in d.items(): if 'AccountId' in k: d[k] = sanitize_string(v)", "key_name='Logs/AssumeRoleFailure/'+ str(date.today().year)+ '/'+str(date.today().month)+'/'+str(date.today().day)+'/'+str(match.group(1))+'.log' client = boto3.client('s3') client.put_object(ACL='bucket-owner-full-control',StorageClass='STANDARD',Body=error, Bucket=os.environ['S3BucketName'],Key=key_name) return #Assume Role in Child", "Trusted Advisor Check:'+checkId) response = supportClient.refresh_trusted_advisor_check( checkId=checkId ) logger.info(sanitize_json(response)) return response def checkAssumeRoleFailure(error):", "to assumeRole(): 
def assumeRole(accountId):
    """Assume the explorer IAM role in child account *accountId*.

    The role name comes from the IAMRoleName environment variable; returns
    the raw STS assume_role response (temporary credentials under
    ['Credentials']).
    """
    logger.info('Variables passed to assumeRole(): '+sanitize_string(accountId))
    # Build the ARN of the cross-account role to assume.
    roleArn = "arn:aws:iam::" + str(accountId) + ":role/" + os.environ['IAMRoleName']
    sts = boto3.client('sts')
    return sts.assume_role(RoleArn=roleArn,
                           RoleSessionName="AWSTrustedAdvisorExplorerAssumeRole")
def sanitize_json(x):
    """Return a masked copy of the event dict *x* when PII masking is on.

    When the MASK_PII environment variable is 'true' (case-insensitive):
    keys containing 'AccountId' are masked via sanitize_string(); keys
    containing 'AccountName' or 'AccountEmail' keep only their first and
    last three characters with '-MASKED-' in between.  The input dict
    itself is never modified.
    """
    masked = x.copy()
    if os.environ['MASK_PII'].lower() != 'true':
        return masked
    for key, value in masked.items():
        if 'AccountId' in key:
            masked[key] = sanitize_string(value)
        if 'AccountName' in key:
            masked[key] = value[:3] + '-MASKED-' + value[-3:]
        if 'AccountEmail' in key:
            masked[key] = value[:3] + '-MASKED-' + value[-3:]
    return masked
class AWSTrustedAdvisorExplorerGenericException(Exception):
    """Generic wrapper exception raised by this Lambda.

    All caught ClientError/Exception instances are re-raised as this single
    type so downstream consumers (e.g. Step Functions catch clauses) only
    need to match one exception name.
    """
    pass


# Module-level logger; level is taken from the LOG_LEVEL environment
# variable when present (e.g. "INFO", "DEBUG").
logger = logging.getLogger()
if "LOG_LEVEL" in os.environ:
    numeric_level = getattr(logging, os.environ['LOG_LEVEL'].upper(), None)
    if not isinstance(numeric_level, int):
        # BUG FIX: the original raised with the undefined name `loglevel`,
        # which produced a NameError instead of the intended ValueError.
        # Report the actual environment value.
        raise ValueError('Invalid log level: %s' % os.environ['LOG_LEVEL'])
    logger.setLevel(level=numeric_level)
def lambda_handler(event, context):
    """Refresh one Trusted Advisor check in a child account.

    Assumes the cross-account role for event['AccountId'], builds a Support
    client from the temporary credentials, triggers a refresh of
    event['CheckId'], and returns the event with 'RefreshStatus' appended
    for the downstream Lambda.  Any failure is logged (sanitized) and
    re-raised as AWSTrustedAdvisorExplorerGenericException.
    """
    try:
        logger.info(sanitize_json(event))
        logger.info("Assume Role in child account")
        creds = assumeRole(event['AccountId'])['Credentials']
        logger.info("Create boto3 support client using the temporary credentials")
        # Trusted Advisor / Support API is only available in us-east-1.
        supportClient = boto3.client(
            "support",
            region_name="us-east-1",
            aws_access_key_id=creds['AccessKeyId'],
            aws_secret_access_key=creds['SecretAccessKey'],
            aws_session_token=creds['SessionToken'],
        )
        response = refresh_trusted_advisor_checks(supportClient, event['CheckId'])
        status = response['status']['status']
        logger.info("Append the Refresh Status '" + status + "' to response." +
                    " This will be consumed by downstream Lambda")
        event["RefreshStatus"] = status
        return event
    except ClientError as e:
        # Best-effort: archive AssumeRole AccessDenied failures to S3.
        checkAssumeRoleFailure(str(e))
        msg = sanitize_string(e)
        logger.error("Unexpected client error %s" % msg)
        raise AWSTrustedAdvisorExplorerGenericException(msg)
    except Exception as f:
        checkAssumeRoleFailure(str(f))
        msg = sanitize_string(f)
        logger.error("Unexpected exception: %s" % msg)
        raise AWSTrustedAdvisorExplorerGenericException(msg)
def checkAssumeRoleFailure(error):
    """Archive an AssumeRole AccessDenied error message to S3.

    If *error* looks like an STS AssumeRole AccessDenied failure and
    contains a 12-digit account id in an iam:: ARN, the raw message is
    written to s3://$S3BucketName/Logs/AssumeRoleFailure/YYYY/M/D/<acct>.log.
    Any other error string is ignored.  Always returns None.
    """
    if "(AccessDenied) when calling the AssumeRole operation" in error:
        pattern = re.compile(r'.*iam::(\d{12}):.*$')
        match = pattern.match(error)
        # BUG FIX: the original logged match.group(1) BEFORE checking for
        # None, crashing with AttributeError when the regex did not match.
        if match is not None:
            logger.info('Assume Role Error for Account:' + match.group(1))
            key_name = ('Logs/AssumeRoleFailure/' + str(date.today().year) + '/' +
                        str(date.today().month) + '/' + str(date.today().day) + '/' +
                        str(match.group(1)) + '.log')
            client = boto3.client('s3')
            client.put_object(ACL='bucket-owner-full-control', StorageClass='STANDARD',
                              Body=error, Bucket=os.environ['S3BucketName'], Key=key_name)
    return
def sanitize_string(x):
    """Return str(x) with any 12-digit AWS account number partially masked.

    When the MASK_PII environment variable is 'true' (case-insensitive),
    each 12-digit run is replaced by one kept digit + 'XXXXXXX' + the last
    four digits.  NOTE(review): the kept digit is the *second* character of
    the match (index [1]); possibly index [0] was intended — behavior is
    preserved exactly as written.
    """
    text = str(x)
    if os.environ['MASK_PII'].lower() == 'true':
        account_re = re.compile(r'\d{12}')

        def _mask(m):
            digits = m.group()
            return digits[1] + 'XXXXXXX' + digits[-4:]

        text = account_re.sub(_mask, text)
    return text
A copy of the License is located at # # #", "k: d[k] = v[:3]+'-MASKED-'+v[-3:] if 'AccountEmail' in k: d[k] = v[:3]+'-MASKED-'+v[-3:] return d", "Error for Account:'+match.group(1)) if match != None: key_name='Logs/AssumeRoleFailure/'+ str(date.today().year)+ '/'+str(date.today().month)+'/'+str(date.today().day)+'/'+str(match.group(1))+'.log' client = boto3.client('s3')", "# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. # #", "\"AS IS\" BASIS, WITHOUT WARRANTIES # # OR CONDITIONS OF ANY KIND, express", "# # # # Licensed under the Apache License Version 2.0 (the \"License\").", "use this file except in compliance # # with the License. A copy", "'+sanitize_string(accountId)) roleArn=\"arn:aws:iam::\"+str(accountId)+\":role/\"+os.environ['IAMRoleName'] #STS assume role call stsClient = boto3.client('sts') roleCredentials = stsClient.assume_role(RoleArn=roleArn, RoleSessionName=\"AWSTrustedAdvisorExplorerAssumeRole\")", "for the specific language governing permissions # # and limitations under the License.", "'\"+response['status']['status']+\"' to response.\" + \" This will be consumed by downstream Lambda\") event[\"RefreshStatus\"]", "Status '\"+response['status']['status']+\"' to response.\" + \" This will be consumed by downstream Lambda\")", "d = x.copy() if os.environ['MASK_PII'].lower() == 'true': for k, v in d.items(): if", "under the License. # ###################################################################################################################### import re,boto3,logging,os from datetime import date from botocore.exceptions", "copy of the License is located at # # # # http://www.apache.org/licenses/ #", "under the Apache License Version 2.0 (the \"License\"). 
You may not use this", "logger.info(\"Assume Role in child account\") roleCredentials=assumeRole(event['AccountId']) logger.info(\"Create boto3 support client using the temporary", "return d def sanitize_string(x): y = str(x) if os.environ['MASK_PII'].lower() == 'true': pattern=re.compile('\\d{12}') y", "checkId=checkId ) logger.info(sanitize_json(response)) return response def checkAssumeRoleFailure(error): if \"(AccessDenied) when calling the AssumeRole", "in d.items(): if 'AccountId' in k: d[k] = sanitize_string(v) if 'AccountName' in k:", "License. # ###################################################################################################################### import re,boto3,logging,os from datetime import date from botocore.exceptions import ClientError", "int): raise ValueError('Invalid log level: %s' % loglevel) logger.setLevel(level=numeric_level) def sanitize_json(x): d =", "str(x) if os.environ['MASK_PII'].lower() == 'true': pattern=re.compile('\\d{12}') y = re.sub(pattern,lambda match: ((match.group()[1])+'XXXXXXX'+(match.group()[-4:])), y) return", "def lambda_handler(event, context): try: logger.info(sanitize_json(event)) logger.info(\"Assume Role in child account\") roleCredentials=assumeRole(event['AccountId']) logger.info(\"Create boto3", "lambda_handler(event, context): try: logger.info(sanitize_json(event)) logger.info(\"Assume Role in child account\") roleCredentials=assumeRole(event['AccountId']) logger.info(\"Create boto3 support" ]
[ "1 old_array = self.__array self.__array = array(old_array.typecode) for i, elem in enumerate(old_array): if", "False def __add__(self, other: \"ArrayList\") -> \"ArrayList\": copy = deepcopy(self) copy += other", "other: return True return False def __ne__(self, other: \"ArrayList\") -> bool: return not", "array(self.__array.typecode, [0 for _ in range(len(self.__array) - 1)]) i = 0 is_found =", "str, *args) -> None: self.__array = array(type_char, args) def __str__(self) -> str: return", "ArrayList(object): class Iterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data =", "index > len(self.__array) - 1: raise IndexError new_array = array(self.__array.typecode, [0 for _", "elem in self.__array: if i == index and not is_found: item = self.__array[i]", "ArrayType __index: int def __init__(self, data: ArrayType): self.__data = data self.__index = -1", "stop=None) -> int: stop = stop if stop is not None else len(self.__array)", "result = deepcopy(self) result *= mult return result def __imul__(self, mult: int) ->", "def count(self, item: T) -> int: count = 0 for elem in self.__array:", "\"ArrayList\": self.__array += other.__array return self def __lt__(self, other: \"ArrayList\") -> bool: if", "<reponame>Frumka/python_developer from array import array, ArrayType from typing import TypeVar, Iterable from copy", "= self.__array self.__array = array(old_array.typecode) for i, elem in enumerate(old_array): if i ==", "index = len(self.__array) + index if index > len(self.__array) - 1: raise IndexError", "def insert(self, index: int, item: T) -> None: if index == -1: self.append(item)", "return False def __add__(self, other: \"ArrayList\") -> \"ArrayList\": copy = deepcopy(self) copy +=", "[item]) def count(self, item: T) -> int: count = 0 for elem in", "else: not_equal = False for i, elem in enumerate(self.__array): if elem > other.__array[i]:", "is_found = True continue new_array[i] = elem i += 1 self.__array = new_array", 
"return count def index(self, target: T, start=0, stop=None) -> int: stop = stop", "if self.__array[i] == target: return i raise ValueError def extend(self, *args: Iterable) ->", "int) -> T: return self.__array[index] def __len__(self) -> int: return len(self.__array) def __contains__(self,", "data self.__index = len(data) def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self): if", "-> T: if index < 0: index = len(self.__array) + index if index", "Iterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data = data self.__index", "-> bool: if len(self.__array) != len(other.__array): return False else: for i, elem in", "1 return count def index(self, target: T, start=0, stop=None) -> int: stop =", "target == item: return True return False def __add__(self, other: \"ArrayList\") -> \"ArrayList\":", "i == index and not is_found: item = self.__array[i] is_found = True continue", "False return True def __le__(self, other: \"ArrayList\") -> bool: if self < other:", "args: self.__array += array(self.__array.typecode, elem) def insert(self, index: int, item: T) -> None:", "ArrayType def __init__(self, type_char: str, *args) -> None: self.__array = array(type_char, args) def", "for item in self.__array: if target == item: return True return False def", "def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self): if self.__index == 0: raise", "False for i, elem in enumerate(self.__array): if elem > other.__array[i]: return False if", "i = 0 is_found = False for elem in self.__array: if i ==", "= deepcopy(self) copy += other return copy def __iadd__(self, other: \"ArrayList\") -> \"ArrayList\":", "i += 1 self.__array = new_array return item def remove(self, target: T) ->", "__array: ArrayType def __init__(self, type_char: str, *args) -> None: self.__array = array(type_char, args)", "int: return len(self.__array) def __contains__(self, target: T) -> bool: for item in self.__array:", "len(self.__array) def 
__contains__(self, target: T) -> bool: for item in self.__array: if target", "if elem > other.__array[i]: return False if elem < other.__array[i]: not_equal = True", "i raise ValueError def extend(self, *args: Iterable) -> None: for elem in args:", "% old_len] self.__array = new_array return self def append(self, item: T) -> None:", "not_equal = False for i, elem in enumerate(self.__array): if elem > other.__array[i]: return", "-> int: count = 0 for elem in self.__array: if elem == item:", "else len(self.__array) for i in range(start, stop): if self.__array[i] == target: return i", "self def __next__(self) -> T: if self.__index > len(self.__data) - 2: raise StopIteration()", "__init__(self, data: ArrayType): self.__data = data self.__index = -1 def __iter__(self) -> 'ArrayList.Iterator':", "other: \"ArrayList\") -> bool: if len(self.__array) < len(other.__array): return True elif len(self.__array) >", "other.__array[i]: return False return True def __le__(self, other: \"ArrayList\") -> bool: if self", "__reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int, value: T) -> None:", "new_array[i] = self.__array[i % old_len] self.__array = new_array return self def append(self, item:", "other: \"ArrayList\") -> bool: if len(self.__array) != len(other.__array): return False else: for i,", "ReverseIterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data = data self.__index", "return False if elem < other.__array[i]: not_equal = True return not_equal def __eq__(self,", "__init__(self, type_char: str, *args) -> None: self.__array = array(type_char, args) def __str__(self) ->", "== target: return i raise ValueError def extend(self, *args: Iterable) -> None: for", "elem in enumerate(self.__array): if elem > other.__array[i]: return False if elem < other.__array[i]:", "for i in range(new_len): new_array[i] = self.__array[i % old_len] self.__array = new_array 
return", "for _ in range(len(self.__array) - 1)]) i = 0 is_found = False for", "return self.__data[self.__index] class ReverseIterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data", "return self def __next__(self): if self.__index == 0: raise StopIteration() self.__index -= 1", "copy += other return copy def __iadd__(self, other: \"ArrayList\") -> \"ArrayList\": self.__array +=", "__ne__(self, other: \"ArrayList\") -> bool: return not self == other def __gt__(self, other:", "== other: return True return False def __ne__(self, other: \"ArrayList\") -> bool: return", "if elem == item: count += 1 return count def index(self, target: T,", "in self.__array: if i == index and not is_found: item = self.__array[i] is_found", "other: \"ArrayList\") -> bool: return not self == other def __gt__(self, other: \"ArrayList\")", "-> 'ArrayList.Iterator': return self def __next__(self) -> T: if self.__index > len(self.__data) -", "StopIteration() self.__index += 1 return self.__data[self.__index] class ReverseIterator(object): __data: ArrayType __index: int def", "self.__array = new_array return self def append(self, item: T) -> None: self.__array +=", "key: int, value: T) -> None: self.__array[key] = value def __delitem__(self, key: int)", "mult: int) -> \"ArrayList\": result = deepcopy(self) result *= mult return result def", "< other.__array[i]: not_equal = True return not_equal def __eq__(self, other: \"ArrayList\") -> bool:", "-> bool: return not self <= other def __ge__(self, other: \"ArrayList\") -> bool:", "= stop if stop is not None else len(self.__array) for i in range(start,", "array(self.__array.typecode, elem) def insert(self, index: int, item: T) -> None: if index ==", "-> None: for elem in args: self.__array += array(self.__array.typecode, elem) def insert(self, index:", "def __imul__(self, mult: int) -> \"ArrayList\": old_len = len(self.__array) new_len = old_len *", "elem in args: self.__array += 
array(self.__array.typecode, elem) def insert(self, index: int, item: T)", "> len(self.__array) - 1: raise IndexError new_array = array(self.__array.typecode, [0 for _ in", "ArrayType from typing import TypeVar, Iterable from copy import deepcopy T = TypeVar(\"T\")", "array(type_char, args) def __str__(self) -> str: return self.__array.__str__() def __getitem__(self, index: int) ->", "< other: return True elif self == other: return True return False def", "item: T) -> None: self.__array += array(self.__array.typecode, [item]) def count(self, item: T) ->", "deepcopy T = TypeVar(\"T\") class ArrayList(object): class Iterator(object): __data: ArrayType __index: int def", "for i in range(start, stop): if self.__array[i] == target: return i raise ValueError", "-> None: self.__array = array(type_char, args) def __str__(self) -> str: return self.__array.__str__() def", "0: raise StopIteration() self.__index -= 1 return self.__data[self.__index] __array: ArrayType def __init__(self, type_char:", "self.__index > len(self.__data) - 2: raise StopIteration() self.__index += 1 return self.__data[self.__index] class", "i in range(new_len): new_array[i] = self.__array[i % old_len] self.__array = new_array return self", "\"ArrayList\") -> bool: return not self < other def __mul__(self, mult: int) ->", "0 is_found = False for elem in self.__array: if i == index and", "Iterable from copy import deepcopy T = TypeVar(\"T\") class ArrayList(object): class Iterator(object): __data:", "= len(data) def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self): if self.__index ==", "extend(self, *args: Iterable) -> None: for elem in args: self.__array += array(self.__array.typecode, elem)", "return False return True def __le__(self, other: \"ArrayList\") -> bool: if self <", "other: \"ArrayList\") -> bool: return not self <= other def __ge__(self, other: \"ArrayList\")", "True elif self == other: return True return False def __ne__(self, other: \"ArrayList\")", "False def 
__ne__(self, other: \"ArrayList\") -> bool: return not self == other def", "__eq__(self, other: \"ArrayList\") -> bool: if len(self.__array) != len(other.__array): return False else: for", "other def __mul__(self, mult: int) -> \"ArrayList\": result = deepcopy(self) result *= mult", "copy def __iadd__(self, other: \"ArrayList\") -> \"ArrayList\": self.__array += other.__array return self def", "'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int, value: T) -> None: self.__array[key] =", "-> None: self.__array += array(self.__array.typecode, [item]) def count(self, item: T) -> int: count", "not None else len(self.__array) for i in range(start, stop): if self.__array[i] == target:", "old_len] self.__array = new_array return self def append(self, item: T) -> None: self.__array", "def __iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self, index=-1) -> T: if index", "mult return result def __imul__(self, mult: int) -> \"ArrayList\": old_len = len(self.__array) new_len", "\"ArrayList\") -> bool: if len(self.__array) < len(other.__array): return True elif len(self.__array) > len(other.__array):", "count = 0 for elem in self.__array: if elem == item: count +=", "self.__array = array(old_array.typecode) for i, elem in enumerate(old_array): if i == index: self.append(item)", "if self < other: return True elif self == other: return True return", "Iterable) -> None: for elem in args: self.__array += array(self.__array.typecode, elem) def insert(self,", "if index == -1: self.append(item) return elif index < 0: index = len(self.__array)", "return i raise ValueError def extend(self, *args: Iterable) -> None: for elem in", "for elem in self.__array: if i == index and not is_found: item =", "1 return self.__data[self.__index] __array: ArrayType def __init__(self, type_char: str, *args) -> None: self.__array", "= array(self.__array.typecode, [0 for _ in range(new_len)]) for i in 
range(new_len): new_array[i] =", "= new_array return item def remove(self, target: T) -> None: index = self.index(target)", "class ReverseIterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data = data", "T = TypeVar(\"T\") class ArrayList(object): class Iterator(object): __data: ArrayType __index: int def __init__(self,", "index: int) -> T: return self.__array[index] def __len__(self) -> int: return len(self.__array) def", "None: for elem in args: self.__array += array(self.__array.typecode, elem) def insert(self, index: int,", "pop(self, index=-1) -> T: if index < 0: index = len(self.__array) + index", "T: if self.__index > len(self.__data) - 2: raise StopIteration() self.__index += 1 return", "other: \"ArrayList\") -> bool: if self < other: return True elif self ==", "raise IndexError new_array = array(self.__array.typecode, [0 for _ in range(len(self.__array) - 1)]) i", "2: raise StopIteration() self.__index += 1 return self.__data[self.__index] class ReverseIterator(object): __data: ArrayType __index:", "__iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self, index=-1) -> T: if index <", "ArrayList.Iterator(self.__array) def pop(self, index=-1) -> T: if index < 0: index = len(self.__array)", "int) -> \"ArrayList\": result = deepcopy(self) result *= mult return result def __imul__(self,", "__setitem__(self, key: int, value: T) -> None: self.__array[key] = value def __delitem__(self, key:", "self.__array[i] is_found = True continue new_array[i] = elem i += 1 self.__array =", "-> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int, value: T) -> None: self.__array[key]", "-> \"ArrayList\": result = deepcopy(self) result *= mult return result def __imul__(self, mult:", "in self.__array: if target == item: return True return False def __add__(self, other:", "for i, elem in enumerate(self.__array): if elem > other.__array[i]: return 
False if elem", "int, item: T) -> None: if index == -1: self.append(item) return elif index", "insert(self, index: int, item: T) -> None: if index == -1: self.append(item) return", "import deepcopy T = TypeVar(\"T\") class ArrayList(object): class Iterator(object): __data: ArrayType __index: int", "if elem != other.__array[i]: return False return True def __le__(self, other: \"ArrayList\") ->", "1)]) i = 0 is_found = False for elem in self.__array: if i", "self.__data = data self.__index = len(data) def __iter__(self) -> 'ArrayList.Iterator': return self def", "elem > other.__array[i]: return False if elem < other.__array[i]: not_equal = True return", "new_array = array(self.__array.typecode, [0 for _ in range(len(self.__array) - 1)]) i = 0", "\"ArrayList\") -> \"ArrayList\": self.__array += other.__array return self def __lt__(self, other: \"ArrayList\") ->", "in range(start, stop): if self.__array[i] == target: return i raise ValueError def extend(self,", "'ArrayList.Iterator': return self def __next__(self): if self.__index == 0: raise StopIteration() self.__index -=", "T) -> None: self.__array += array(self.__array.typecode, [item]) def count(self, item: T) -> int:", "int: count = 0 for elem in self.__array: if elem == item: count", "def extend(self, *args: Iterable) -> None: for elem in args: self.__array += array(self.__array.typecode,", "+= array(self.__array.typecode, [item]) def count(self, item: T) -> int: count = 0 for", "self.append(item) self.append(elem) else: self.append(elem) def __iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self, index=-1)", "bool: if len(self.__array) < len(other.__array): return True elif len(self.__array) > len(other.__array): return False", "-= 1 return self.__data[self.__index] __array: ArrayType def __init__(self, type_char: str, *args) -> None:", "return True return False def __ne__(self, other: \"ArrayList\") -> bool: return not self", "for elem in args: self.__array += 
array(self.__array.typecode, elem) def insert(self, index: int, item:", "typing import TypeVar, Iterable from copy import deepcopy T = TypeVar(\"T\") class ArrayList(object):", "index = len(self.__array) + index + 1 old_array = self.__array self.__array = array(old_array.typecode)", "self def __lt__(self, other: \"ArrayList\") -> bool: if len(self.__array) < len(other.__array): return True", "-> bool: if len(self.__array) < len(other.__array): return True elif len(self.__array) > len(other.__array): return", "= False for elem in self.__array: if i == index and not is_found:", "-1: self.append(item) return elif index < 0: index = len(self.__array) + index +", "enumerate(self.__array): if elem != other.__array[i]: return False return True def __le__(self, other: \"ArrayList\")", "self.__array[i] == target: return i raise ValueError def extend(self, *args: Iterable) -> None:", "ValueError def extend(self, *args: Iterable) -> None: for elem in args: self.__array +=", "for _ in range(new_len)]) for i in range(new_len): new_array[i] = self.__array[i % old_len]", "len(self.__array) < len(other.__array): return True elif len(self.__array) > len(other.__array): return False else: not_equal", "elif index < 0: index = len(self.__array) + index + 1 old_array =", "return True elif len(self.__array) > len(other.__array): return False else: not_equal = False for", "is not None else len(self.__array) for i in range(start, stop): if self.__array[i] ==", "index: int, item: T) -> None: if index == -1: self.append(item) return elif", "in range(len(self.__array) - 1)]) i = 0 is_found = False for elem in", "[0 for _ in range(len(self.__array) - 1)]) i = 0 is_found = False", "other.__array[i]: return False if elem < other.__array[i]: not_equal = True return not_equal def", "remove(self, target: T) -> None: index = self.index(target) self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator':", "-> \"ArrayList\": old_len = len(self.__array) new_len = old_len * mult new_array = 
array(self.__array.typecode,", "if i == index: self.append(item) self.append(elem) else: self.append(elem) def __iter__(self) -> 'ArrayList.Iterator': return", "__len__(self) -> int: return len(self.__array) def __contains__(self, target: T) -> bool: for item", "\"ArrayList\": copy = deepcopy(self) copy += other return copy def __iadd__(self, other: \"ArrayList\")", "append(self, item: T) -> None: self.__array += array(self.__array.typecode, [item]) def count(self, item: T)", "+= other return copy def __iadd__(self, other: \"ArrayList\") -> \"ArrayList\": self.__array += other.__array", "-> bool: if self < other: return True elif self == other: return", "return not self < other def __mul__(self, mult: int) -> \"ArrayList\": result =", "* mult new_array = array(self.__array.typecode, [0 for _ in range(new_len)]) for i in", "T, start=0, stop=None) -> int: stop = stop if stop is not None", "return True return False def __add__(self, other: \"ArrayList\") -> \"ArrayList\": copy = deepcopy(self)", "not_equal = True return not_equal def __eq__(self, other: \"ArrayList\") -> bool: if len(self.__array)", "int def __init__(self, data: ArrayType): self.__data = data self.__index = -1 def __iter__(self)", "self def append(self, item: T) -> None: self.__array += array(self.__array.typecode, [item]) def count(self,", "not is_found: item = self.__array[i] is_found = True continue new_array[i] = elem i", "len(self.__array) new_len = old_len * mult new_array = array(self.__array.typecode, [0 for _ in", "index(self, target: T, start=0, stop=None) -> int: stop = stop if stop is", "T) -> int: count = 0 for elem in self.__array: if elem ==", "data self.__index = -1 def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self) ->", "def __next__(self) -> T: if self.__index > len(self.__data) - 2: raise StopIteration() self.__index", "= elem i += 1 self.__array = new_array return item def remove(self, target:", "__str__(self) -> str: return self.__array.__str__() def 
__getitem__(self, index: int) -> T: return self.__array[index]", "item in self.__array: if target == item: return True return False def __add__(self,", "len(other.__array): return False else: for i, elem in enumerate(self.__array): if elem != other.__array[i]:", "True return False def __ne__(self, other: \"ArrayList\") -> bool: return not self ==", "1 return self.__data[self.__index] class ReverseIterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType):", "self.__data[self.__index] class ReverseIterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data =", "= data self.__index = -1 def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self)", "self.__data = data self.__index = -1 def __iter__(self) -> 'ArrayList.Iterator': return self def", "'ArrayList.Iterator': return self def __next__(self) -> T: if self.__index > len(self.__data) - 2:", "self.__array: if elem == item: count += 1 return count def index(self, target:", "str: return self.__array.__str__() def __getitem__(self, index: int) -> T: return self.__array[index] def __len__(self)", "__index: int def __init__(self, data: ArrayType): self.__data = data self.__index = -1 def", "== index and not is_found: item = self.__array[i] is_found = True continue new_array[i]", "__le__(self, other: \"ArrayList\") -> bool: if self < other: return True elif self", "> other.__array[i]: return False if elem < other.__array[i]: not_equal = True return not_equal", "= self.__array[i % old_len] self.__array = new_array return self def append(self, item: T)", "-> None: if index == -1: self.append(item) return elif index < 0: index", "deepcopy(self) copy += other return copy def __iadd__(self, other: \"ArrayList\") -> \"ArrayList\": self.__array", "__init__(self, data: ArrayType): self.__data = data self.__index = len(data) def __iter__(self) -> 'ArrayList.Iterator':", "def __init__(self, data: ArrayType): self.__data = data self.__index = len(data) 
def __iter__(self) ->", "-> T: return self.__array[index] def __len__(self) -> int: return len(self.__array) def __contains__(self, target:", "i in range(start, stop): if self.__array[i] == target: return i raise ValueError def", "enumerate(self.__array): if elem > other.__array[i]: return False if elem < other.__array[i]: not_equal =", "array(self.__array.typecode, [item]) def count(self, item: T) -> int: count = 0 for elem", "start=0, stop=None) -> int: stop = stop if stop is not None else", "-> bool: return not self < other def __mul__(self, mult: int) -> \"ArrayList\":", "self.__array = array(type_char, args) def __str__(self) -> str: return self.__array.__str__() def __getitem__(self, index:", "not_equal def __eq__(self, other: \"ArrayList\") -> bool: if len(self.__array) != len(other.__array): return False", "\"ArrayList\") -> bool: if len(self.__array) != len(other.__array): return False else: for i, elem", "return False else: not_equal = False for i, elem in enumerate(self.__array): if elem", "other: \"ArrayList\") -> \"ArrayList\": copy = deepcopy(self) copy += other return copy def", "self.append(elem) else: self.append(elem) def __iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self, index=-1) ->", "from typing import TypeVar, Iterable from copy import deepcopy T = TypeVar(\"T\") class", "raise ValueError def extend(self, *args: Iterable) -> None: for elem in args: self.__array", "value: T) -> None: self.__array[key] = value def __delitem__(self, key: int) -> None:", "def __lt__(self, other: \"ArrayList\") -> bool: if len(self.__array) < len(other.__array): return True elif", "target: return i raise ValueError def extend(self, *args: Iterable) -> None: for elem", "= -1 def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self) -> T: if", "i, elem in enumerate(self.__array): if elem != other.__array[i]: return False return True def", "data: ArrayType): self.__data = data self.__index = len(data) def 
__iter__(self) -> 'ArrayList.Iterator': return", "__iadd__(self, other: \"ArrayList\") -> \"ArrayList\": self.__array += other.__array return self def __lt__(self, other:", "\"ArrayList\": old_len = len(self.__array) new_len = old_len * mult new_array = array(self.__array.typecode, [0", "if stop is not None else len(self.__array) for i in range(start, stop): if", "elem i += 1 self.__array = new_array return item def remove(self, target: T)", "int, value: T) -> None: self.__array[key] = value def __delitem__(self, key: int) ->", "\"ArrayList\") -> bool: return not self <= other def __ge__(self, other: \"ArrayList\") ->", "old_array = self.__array self.__array = array(old_array.typecode) for i, elem in enumerate(old_array): if i", "+ index if index > len(self.__array) - 1: raise IndexError new_array = array(self.__array.typecode,", "*= mult return result def __imul__(self, mult: int) -> \"ArrayList\": old_len = len(self.__array)", "other.__array[i]: not_equal = True return not_equal def __eq__(self, other: \"ArrayList\") -> bool: if", "return self def __next__(self) -> T: if self.__index > len(self.__data) - 2: raise", "-> int: return len(self.__array) def __contains__(self, target: T) -> bool: for item in", "return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int, value: T) -> None: self.__array[key] = value", "False else: not_equal = False for i, elem in enumerate(self.__array): if elem >", "True return not_equal def __eq__(self, other: \"ArrayList\") -> bool: if len(self.__array) != len(other.__array):", "elif len(self.__array) > len(other.__array): return False else: not_equal = False for i, elem", "return False def __ne__(self, other: \"ArrayList\") -> bool: return not self == other", "False else: for i, elem in enumerate(self.__array): if elem != other.__array[i]: return False", "self < other def __mul__(self, mult: int) -> \"ArrayList\": result = deepcopy(self) result", "int def __init__(self, data: ArrayType): self.__data = data 
self.__index = len(data) def __iter__(self)", "== item: count += 1 return count def index(self, target: T, start=0, stop=None)", "- 2: raise StopIteration() self.__index += 1 return self.__data[self.__index] class ReverseIterator(object): __data: ArrayType", "copy = deepcopy(self) copy += other return copy def __iadd__(self, other: \"ArrayList\") ->", "def __next__(self): if self.__index == 0: raise StopIteration() self.__index -= 1 return self.__data[self.__index]", "if target == item: return True return False def __add__(self, other: \"ArrayList\") ->", "__lt__(self, other: \"ArrayList\") -> bool: if len(self.__array) < len(other.__array): return True elif len(self.__array)", "False if elem < other.__array[i]: not_equal = True return not_equal def __eq__(self, other:", "+= 1 return count def index(self, target: T, start=0, stop=None) -> int: stop", "T) -> None: if index == -1: self.append(item) return elif index < 0:", "self < other: return True elif self == other: return True return False", "in range(new_len)]) for i in range(new_len): new_array[i] = self.__array[i % old_len] self.__array =", "elem in enumerate(self.__array): if elem != other.__array[i]: return False return True def __le__(self,", "item: return True return False def __add__(self, other: \"ArrayList\") -> \"ArrayList\": copy =", "return ArrayList.Iterator(self.__array) def pop(self, index=-1) -> T: if index < 0: index =", "-1 def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self) -> T: if self.__index", "range(new_len): new_array[i] = self.__array[i % old_len] self.__array = new_array return self def append(self,", "result *= mult return result def __imul__(self, mult: int) -> \"ArrayList\": old_len =", "mult: int) -> \"ArrayList\": old_len = len(self.__array) new_len = old_len * mult new_array", "= data self.__index = len(data) def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self):", "in enumerate(self.__array): if elem > other.__array[i]: return False if 
elem < other.__array[i]: not_equal", "return True elif self == other: return True return False def __ne__(self, other:", "return self.__data[self.__index] __array: ArrayType def __init__(self, type_char: str, *args) -> None: self.__array =", "= self.index(target) self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int,", "__imul__(self, mult: int) -> \"ArrayList\": old_len = len(self.__array) new_len = old_len * mult", "-> bool: for item in self.__array: if target == item: return True return", "False for elem in self.__array: if i == index and not is_found: item", "in enumerate(old_array): if i == index: self.append(item) self.append(elem) else: self.append(elem) def __iter__(self) ->", "_ in range(new_len)]) for i in range(new_len): new_array[i] = self.__array[i % old_len] self.__array", "ArrayType): self.__data = data self.__index = len(data) def __iter__(self) -> 'ArrayList.Iterator': return self", "self.append(item) return elif index < 0: index = len(self.__array) + index + 1", "== -1: self.append(item) return elif index < 0: index = len(self.__array) + index", "self.__array += array(self.__array.typecode, [item]) def count(self, item: T) -> int: count = 0", "def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self) -> T: if self.__index >", "from copy import deepcopy T = TypeVar(\"T\") class ArrayList(object): class Iterator(object): __data: ArrayType", "= len(self.__array) + index + 1 old_array = self.__array self.__array = array(old_array.typecode) for", "and not is_found: item = self.__array[i] is_found = True continue new_array[i] = elem", "self def __next__(self): if self.__index == 0: raise StopIteration() self.__index -= 1 return", "def __mul__(self, mult: int) -> \"ArrayList\": result = deepcopy(self) result *= mult return", "other return copy def __iadd__(self, other: \"ArrayList\") -> \"ArrayList\": self.__array += other.__array return", 
"__gt__(self, other: \"ArrayList\") -> bool: return not self <= other def __ge__(self, other:", "= True return not_equal def __eq__(self, other: \"ArrayList\") -> bool: if len(self.__array) !=", "class ArrayList(object): class Iterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data", "+= array(self.__array.typecode, elem) def insert(self, index: int, item: T) -> None: if index", "0: index = len(self.__array) + index if index > len(self.__array) - 1: raise", "+ 1 old_array = self.__array self.__array = array(old_array.typecode) for i, elem in enumerate(old_array):", "0: index = len(self.__array) + index + 1 old_array = self.__array self.__array =", "self.__array = new_array return item def remove(self, target: T) -> None: index =", "not self <= other def __ge__(self, other: \"ArrayList\") -> bool: return not self", "[0 for _ in range(new_len)]) for i in range(new_len): new_array[i] = self.__array[i %", "index=-1) -> T: if index < 0: index = len(self.__array) + index if", "*args: Iterable) -> None: for elem in args: self.__array += array(self.__array.typecode, elem) def", "int) -> \"ArrayList\": old_len = len(self.__array) new_len = old_len * mult new_array =", "def __iadd__(self, other: \"ArrayList\") -> \"ArrayList\": self.__array += other.__array return self def __lt__(self,", "__index: int def __init__(self, data: ArrayType): self.__data = data self.__index = len(data) def", "for i, elem in enumerate(old_array): if i == index: self.append(item) self.append(elem) else: self.append(elem)", "index = self.index(target) self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key:", "> len(other.__array): return False else: not_equal = False for i, elem in enumerate(self.__array):", "def __gt__(self, other: \"ArrayList\") -> bool: return not self <= other def __ge__(self,", "in range(new_len): new_array[i] = self.__array[i % old_len] 
self.__array = new_array return self def", "!= len(other.__array): return False else: for i, elem in enumerate(self.__array): if elem !=", "len(self.__array) != len(other.__array): return False else: for i, elem in enumerate(self.__array): if elem", "None: index = self.index(target) self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self,", "other: \"ArrayList\") -> bool: return not self < other def __mul__(self, mult: int)", "-> bool: return not self == other def __gt__(self, other: \"ArrayList\") -> bool:", "len(self.__array) + index if index > len(self.__array) - 1: raise IndexError new_array =", "= 0 is_found = False for elem in self.__array: if i == index", "self.__index == 0: raise StopIteration() self.__index -= 1 return self.__data[self.__index] __array: ArrayType def", "self == other def __gt__(self, other: \"ArrayList\") -> bool: return not self <=", "other def __gt__(self, other: \"ArrayList\") -> bool: return not self <= other def", "T) -> None: index = self.index(target) self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array)", "stop if stop is not None else len(self.__array) for i in range(start, stop):", "array import array, ArrayType from typing import TypeVar, Iterable from copy import deepcopy", "in self.__array: if elem == item: count += 1 return count def index(self,", "new_array return self def append(self, item: T) -> None: self.__array += array(self.__array.typecode, [item])", "i == index: self.append(item) self.append(elem) else: self.append(elem) def __iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array)", "def __str__(self) -> str: return self.__array.__str__() def __getitem__(self, index: int) -> T: return", "if index > len(self.__array) - 1: raise IndexError new_array = array(self.__array.typecode, [0 for", "target: T) -> bool: for item in self.__array: if target == 
item: return", "index + 1 old_array = self.__array self.__array = array(old_array.typecode) for i, elem in", "bool: return not self <= other def __ge__(self, other: \"ArrayList\") -> bool: return", "other.__array return self def __lt__(self, other: \"ArrayList\") -> bool: if len(self.__array) < len(other.__array):", "range(start, stop): if self.__array[i] == target: return i raise ValueError def extend(self, *args:", "count += 1 return count def index(self, target: T, start=0, stop=None) -> int:", "return result def __imul__(self, mult: int) -> \"ArrayList\": old_len = len(self.__array) new_len =", "self.__array += array(self.__array.typecode, elem) def insert(self, index: int, item: T) -> None: if", "return not_equal def __eq__(self, other: \"ArrayList\") -> bool: if len(self.__array) != len(other.__array): return", "len(other.__array): return False else: not_equal = False for i, elem in enumerate(self.__array): if", "count(self, item: T) -> int: count = 0 for elem in self.__array: if", "for elem in self.__array: if elem == item: count += 1 return count", "self.__index = -1 def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self) -> T:", "target: T) -> None: index = self.index(target) self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator': return", "-> None: index = self.index(target) self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def", "T) -> None: self.__array[key] = value def __delitem__(self, key: int) -> None: self.pop(key)", "from array import array, ArrayType from typing import TypeVar, Iterable from copy import", "== item: return True return False def __add__(self, other: \"ArrayList\") -> \"ArrayList\": copy", "not self == other def __gt__(self, other: \"ArrayList\") -> bool: return not self", "result def __imul__(self, mult: int) -> \"ArrayList\": old_len = len(self.__array) new_len = old_len", "i, elem in enumerate(old_array): if i == index: 
self.append(item) self.append(elem) else: self.append(elem) def", "if i == index and not is_found: item = self.__array[i] is_found = True", "self.index(target) self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int, value:", "= TypeVar(\"T\") class ArrayList(object): class Iterator(object): __data: ArrayType __index: int def __init__(self, data:", "stop is not None else len(self.__array) for i in range(start, stop): if self.__array[i]", "is_found: item = self.__array[i] is_found = True continue new_array[i] = elem i +=", "def __init__(self, type_char: str, *args) -> None: self.__array = array(type_char, args) def __str__(self)", "-> \"ArrayList\": self.__array += other.__array return self def __lt__(self, other: \"ArrayList\") -> bool:", "elem == item: count += 1 return count def index(self, target: T, start=0,", "T: return self.__array[index] def __len__(self) -> int: return len(self.__array) def __contains__(self, target: T)", "enumerate(old_array): if i == index: self.append(item) self.append(elem) else: self.append(elem) def __iter__(self) -> 'ArrayList.Iterator':", "True def __le__(self, other: \"ArrayList\") -> bool: if self < other: return True", "def remove(self, target: T) -> None: index = self.index(target) self.pop(index) def __reversed__(self) ->", "-> str: return self.__array.__str__() def __getitem__(self, index: int) -> T: return self.__array[index] def", "if self.__index > len(self.__data) - 2: raise StopIteration() self.__index += 1 return self.__data[self.__index]", "__contains__(self, target: T) -> bool: for item in self.__array: if target == item:", "other def __ge__(self, other: \"ArrayList\") -> bool: return not self < other def", "= self.__array[i] is_found = True continue new_array[i] = elem i += 1 self.__array", "self.__array.__str__() def __getitem__(self, index: int) -> T: return self.__array[index] def __len__(self) -> int:", "if 
len(self.__array) != len(other.__array): return False else: for i, elem in enumerate(self.__array): if", "index < 0: index = len(self.__array) + index if index > len(self.__array) -", "== index: self.append(item) self.append(elem) else: self.append(elem) def __iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def", "deepcopy(self) result *= mult return result def __imul__(self, mult: int) -> \"ArrayList\": old_len", "def __contains__(self, target: T) -> bool: for item in self.__array: if target ==", "args) def __str__(self) -> str: return self.__array.__str__() def __getitem__(self, index: int) -> T:", "data: ArrayType): self.__data = data self.__index = -1 def __iter__(self) -> 'ArrayList.Iterator': return", "other: \"ArrayList\") -> \"ArrayList\": self.__array += other.__array return self def __lt__(self, other: \"ArrayList\")", "def __le__(self, other: \"ArrayList\") -> bool: if self < other: return True elif", "__iter__(self) -> 'ArrayList.Iterator': return self def __next__(self): if self.__index == 0: raise StopIteration()", "= True continue new_array[i] = elem i += 1 self.__array = new_array return", "if elem < other.__array[i]: not_equal = True return not_equal def __eq__(self, other: \"ArrayList\")", "> len(self.__data) - 2: raise StopIteration() self.__index += 1 return self.__data[self.__index] class ReverseIterator(object):", "< 0: index = len(self.__array) + index + 1 old_array = self.__array self.__array", "item: count += 1 return count def index(self, target: T, start=0, stop=None) ->", "'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self, index=-1) -> T: if index < 0: index", "target: T, start=0, stop=None) -> int: stop = stop if stop is not", "+= 1 self.__array = new_array return item def remove(self, target: T) -> None:", "__iter__(self) -> 'ArrayList.Iterator': return self def __next__(self) -> T: if self.__index > len(self.__data)", "!= other.__array[i]: return False return True def __le__(self, 
other: \"ArrayList\") -> bool: if", "-> \"ArrayList\": copy = deepcopy(self) copy += other return copy def __iadd__(self, other:", "import array, ArrayType from typing import TypeVar, Iterable from copy import deepcopy T", "def __ne__(self, other: \"ArrayList\") -> bool: return not self == other def __gt__(self,", "ArrayType): self.__data = data self.__index = -1 def __iter__(self) -> 'ArrayList.Iterator': return self", "self.pop(index) def __reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int, value: T)", "< len(other.__array): return True elif len(self.__array) > len(other.__array): return False else: not_equal =", "== 0: raise StopIteration() self.__index -= 1 return self.__data[self.__index] __array: ArrayType def __init__(self,", "def index(self, target: T, start=0, stop=None) -> int: stop = stop if stop", "def __setitem__(self, key: int, value: T) -> None: self.__array[key] = value def __delitem__(self,", "_ in range(len(self.__array) - 1)]) i = 0 is_found = False for elem", "in args: self.__array += array(self.__array.typecode, elem) def insert(self, index: int, item: T) ->", "return self def __lt__(self, other: \"ArrayList\") -> bool: if len(self.__array) < len(other.__array): return", "\"ArrayList\") -> bool: return not self == other def __gt__(self, other: \"ArrayList\") ->", "IndexError new_array = array(self.__array.typecode, [0 for _ in range(len(self.__array) - 1)]) i =", "def __add__(self, other: \"ArrayList\") -> \"ArrayList\": copy = deepcopy(self) copy += other return", "copy import deepcopy T = TypeVar(\"T\") class ArrayList(object): class Iterator(object): __data: ArrayType __index:", "for i, elem in enumerate(self.__array): if elem != other.__array[i]: return False return True", "+ index + 1 old_array = self.__array self.__array = array(old_array.typecode) for i, elem", "-> int: stop = stop if stop is not None else len(self.__array) for", "TypeVar(\"T\") class 
ArrayList(object): class Iterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType):", "ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int, value: T) -> None: self.__array[key] = value def", "<= other def __ge__(self, other: \"ArrayList\") -> bool: return not self < other", "in enumerate(self.__array): if elem != other.__array[i]: return False return True def __le__(self, other:", "+= 1 return self.__data[self.__index] class ReverseIterator(object): __data: ArrayType __index: int def __init__(self, data:", "i, elem in enumerate(self.__array): if elem > other.__array[i]: return False if elem <", "= len(self.__array) + index if index > len(self.__array) - 1: raise IndexError new_array", "T) -> bool: for item in self.__array: if target == item: return True", "None: self.__array += array(self.__array.typecode, [item]) def count(self, item: T) -> int: count =", "else: self.append(elem) def __iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self, index=-1) -> T:", "= False for i, elem in enumerate(self.__array): if elem > other.__array[i]: return False", "stop = stop if stop is not None else len(self.__array) for i in", "None else len(self.__array) for i in range(start, stop): if self.__array[i] == target: return", "is_found = False for elem in self.__array: if i == index and not", "< 0: index = len(self.__array) + index if index > len(self.__array) - 1:", "-> T: if self.__index > len(self.__data) - 2: raise StopIteration() self.__index += 1", "self.__array += other.__array return self def __lt__(self, other: \"ArrayList\") -> bool: if len(self.__array)", "count def index(self, target: T, start=0, stop=None) -> int: stop = stop if", "= old_len * mult new_array = array(self.__array.typecode, [0 for _ in range(new_len)]) for", "= len(self.__array) new_len = old_len * mult new_array = array(self.__array.typecode, [0 for _", "= array(self.__array.typecode, [0 for _ in range(len(self.__array) - 
1)]) i = 0 is_found", "self.__array: if target == item: return True return False def __add__(self, other: \"ArrayList\")", "range(new_len)]) for i in range(new_len): new_array[i] = self.__array[i % old_len] self.__array = new_array", "len(other.__array): return True elif len(self.__array) > len(other.__array): return False else: not_equal = False", "\"ArrayList\") -> \"ArrayList\": copy = deepcopy(self) copy += other return copy def __iadd__(self,", "index == -1: self.append(item) return elif index < 0: index = len(self.__array) +", "= array(type_char, args) def __str__(self) -> str: return self.__array.__str__() def __getitem__(self, index: int)", "len(self.__array) - 1: raise IndexError new_array = array(self.__array.typecode, [0 for _ in range(len(self.__array)", "len(self.__array) > len(other.__array): return False else: not_equal = False for i, elem in", "item = self.__array[i] is_found = True continue new_array[i] = elem i += 1", "return self.__array.__str__() def __getitem__(self, index: int) -> T: return self.__array[index] def __len__(self) ->", "__next__(self): if self.__index == 0: raise StopIteration() self.__index -= 1 return self.__data[self.__index] __array:", "StopIteration() self.__index -= 1 return self.__data[self.__index] __array: ArrayType def __init__(self, type_char: str, *args)", "index: self.append(item) self.append(elem) else: self.append(elem) def __iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self,", "return not self == other def __gt__(self, other: \"ArrayList\") -> bool: return not", "\"ArrayList\": result = deepcopy(self) result *= mult return result def __imul__(self, mult: int)", "new_len = old_len * mult new_array = array(self.__array.typecode, [0 for _ in range(new_len)])", "old_len = len(self.__array) new_len = old_len * mult new_array = array(self.__array.typecode, [0 for", "range(len(self.__array) - 1)]) i = 0 is_found = False for elem in self.__array:", "elem in self.__array: if elem == 
item: count += 1 return count def", "__getitem__(self, index: int) -> T: return self.__array[index] def __len__(self) -> int: return len(self.__array)", "TypeVar, Iterable from copy import deepcopy T = TypeVar(\"T\") class ArrayList(object): class Iterator(object):", "index if index > len(self.__array) - 1: raise IndexError new_array = array(self.__array.typecode, [0", "__ge__(self, other: \"ArrayList\") -> bool: return not self < other def __mul__(self, mult:", "return item def remove(self, target: T) -> None: index = self.index(target) self.pop(index) def", "len(self.__array) + index + 1 old_array = self.__array self.__array = array(old_array.typecode) for i,", "self == other: return True return False def __ne__(self, other: \"ArrayList\") -> bool:", "import TypeVar, Iterable from copy import deepcopy T = TypeVar(\"T\") class ArrayList(object): class", "None: self.__array = array(type_char, args) def __str__(self) -> str: return self.__array.__str__() def __getitem__(self,", "< other def __mul__(self, mult: int) -> \"ArrayList\": result = deepcopy(self) result *=", "def __reversed__(self) -> 'ArrayList.ReverseIterator': return ArrayList.ReverseIterator(self.__array) def __setitem__(self, key: int, value: T) ->", "len(self.__data) - 2: raise StopIteration() self.__index += 1 return self.__data[self.__index] class ReverseIterator(object): __data:", "self.__array self.__array = array(old_array.typecode) for i, elem in enumerate(old_array): if i == index:", "= 0 for elem in self.__array: if elem == item: count += 1", "new_array = array(self.__array.typecode, [0 for _ in range(new_len)]) for i in range(new_len): new_array[i]", "self.__array: if i == index and not is_found: item = self.__array[i] is_found =", "item: T) -> int: count = 0 for elem in self.__array: if elem", "T: if index < 0: index = len(self.__array) + index if index >", "1: raise IndexError new_array = array(self.__array.typecode, [0 for _ in range(len(self.__array) - 1)])", "raise StopIteration() 
self.__index -= 1 return self.__data[self.__index] __array: ArrayType def __init__(self, type_char: str,", "bool: return not self < other def __mul__(self, mult: int) -> \"ArrayList\": result", "return len(self.__array) def __contains__(self, target: T) -> bool: for item in self.__array: if", "item def remove(self, target: T) -> None: index = self.index(target) self.pop(index) def __reversed__(self)", "def __getitem__(self, index: int) -> T: return self.__array[index] def __len__(self) -> int: return", "def pop(self, index=-1) -> T: if index < 0: index = len(self.__array) +", "- 1: raise IndexError new_array = array(self.__array.typecode, [0 for _ in range(len(self.__array) -", "raise StopIteration() self.__index += 1 return self.__data[self.__index] class ReverseIterator(object): __data: ArrayType __index: int", "= new_array return self def append(self, item: T) -> None: self.__array += array(self.__array.typecode,", "elif self == other: return True return False def __ne__(self, other: \"ArrayList\") ->", "array(old_array.typecode) for i, elem in enumerate(old_array): if i == index: self.append(item) self.append(elem) else:", "mult new_array = array(self.__array.typecode, [0 for _ in range(new_len)]) for i in range(new_len):", "= array(old_array.typecode) for i, elem in enumerate(old_array): if i == index: self.append(item) self.append(elem)", "__add__(self, other: \"ArrayList\") -> \"ArrayList\": copy = deepcopy(self) copy += other return copy", "self.__index += 1 return self.__data[self.__index] class ReverseIterator(object): __data: ArrayType __index: int def __init__(self,", "True elif len(self.__array) > len(other.__array): return False else: not_equal = False for i,", "if self.__index == 0: raise StopIteration() self.__index -= 1 return self.__data[self.__index] __array: ArrayType", "*args) -> None: self.__array = array(type_char, args) def __str__(self) -> str: return self.__array.__str__()", "__mul__(self, mult: int) -> \"ArrayList\": result = 
deepcopy(self) result *= mult return result", "other: return True elif self == other: return True return False def __ne__(self,", "new_array[i] = elem i += 1 self.__array = new_array return item def remove(self,", "def __eq__(self, other: \"ArrayList\") -> bool: if len(self.__array) != len(other.__array): return False else:", "bool: return not self == other def __gt__(self, other: \"ArrayList\") -> bool: return", "len(self.__array) for i in range(start, stop): if self.__array[i] == target: return i raise", "-> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self, index=-1) -> T: if index < 0:", "elem) def insert(self, index: int, item: T) -> None: if index == -1:", "True return False def __add__(self, other: \"ArrayList\") -> \"ArrayList\": copy = deepcopy(self) copy", "stop): if self.__array[i] == target: return i raise ValueError def extend(self, *args: Iterable)", "self.__index = len(data) def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self): if self.__index", "if len(self.__array) < len(other.__array): return True elif len(self.__array) > len(other.__array): return False else:", "item: T) -> None: if index == -1: self.append(item) return elif index <", "1 self.__array = new_array return item def remove(self, target: T) -> None: index", "+= other.__array return self def __lt__(self, other: \"ArrayList\") -> bool: if len(self.__array) <", "return True def __le__(self, other: \"ArrayList\") -> bool: if self < other: return", "__next__(self) -> T: if self.__index > len(self.__data) - 2: raise StopIteration() self.__index +=", "ArrayType __index: int def __init__(self, data: ArrayType): self.__data = data self.__index = len(data)", "else: for i, elem in enumerate(self.__array): if elem != other.__array[i]: return False return", "def __ge__(self, other: \"ArrayList\") -> bool: return not self < other def __mul__(self,", "self.__array[index] def __len__(self) -> int: return len(self.__array) def __contains__(self, target: T) 
-> bool:", "elem in enumerate(old_array): if i == index: self.append(item) self.append(elem) else: self.append(elem) def __iter__(self)", "= deepcopy(self) result *= mult return result def __imul__(self, mult: int) -> \"ArrayList\":", "def __len__(self) -> int: return len(self.__array) def __contains__(self, target: T) -> bool: for", "array(self.__array.typecode, [0 for _ in range(new_len)]) for i in range(new_len): new_array[i] = self.__array[i", "class Iterator(object): __data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data = data", "len(data) def __iter__(self) -> 'ArrayList.Iterator': return self def __next__(self): if self.__index == 0:", "array, ArrayType from typing import TypeVar, Iterable from copy import deepcopy T =", "return not self <= other def __ge__(self, other: \"ArrayList\") -> bool: return not", "not self < other def __mul__(self, mult: int) -> \"ArrayList\": result = deepcopy(self)", "continue new_array[i] = elem i += 1 self.__array = new_array return item def", "type_char: str, *args) -> None: self.__array = array(type_char, args) def __str__(self) -> str:", "== other def __gt__(self, other: \"ArrayList\") -> bool: return not self <= other", "- 1)]) i = 0 is_found = False for elem in self.__array: if", "elem < other.__array[i]: not_equal = True return not_equal def __eq__(self, other: \"ArrayList\") ->", "return self.__array[index] def __len__(self) -> int: return len(self.__array) def __contains__(self, target: T) ->", "index < 0: index = len(self.__array) + index + 1 old_array = self.__array", "True continue new_array[i] = elem i += 1 self.__array = new_array return item", "bool: for item in self.__array: if target == item: return True return False", "self.append(elem) def __iter__(self) -> 'ArrayList.Iterator': return ArrayList.Iterator(self.__array) def pop(self, index=-1) -> T: if", "self.__array[i % old_len] self.__array = new_array return self def append(self, item: T) ->", "old_len * mult new_array = 
array(self.__array.typecode, [0 for _ in range(new_len)]) for i", "bool: if len(self.__array) != len(other.__array): return False else: for i, elem in enumerate(self.__array):", "new_array return item def remove(self, target: T) -> None: index = self.index(target) self.pop(index)", "index and not is_found: item = self.__array[i] is_found = True continue new_array[i] =", "return copy def __iadd__(self, other: \"ArrayList\") -> \"ArrayList\": self.__array += other.__array return self", "__data: ArrayType __index: int def __init__(self, data: ArrayType): self.__data = data self.__index =", "int: stop = stop if stop is not None else len(self.__array) for i", "self <= other def __ge__(self, other: \"ArrayList\") -> bool: return not self <", "def __init__(self, data: ArrayType): self.__data = data self.__index = -1 def __iter__(self) ->", "if index < 0: index = len(self.__array) + index if index > len(self.__array)", "self.__data[self.__index] __array: ArrayType def __init__(self, type_char: str, *args) -> None: self.__array = array(type_char,", "elem != other.__array[i]: return False return True def __le__(self, other: \"ArrayList\") -> bool:", "\"ArrayList\") -> bool: if self < other: return True elif self == other:", "bool: if self < other: return True elif self == other: return True", "-> 'ArrayList.Iterator': return self def __next__(self): if self.__index == 0: raise StopIteration() self.__index", "return self def append(self, item: T) -> None: self.__array += array(self.__array.typecode, [item]) def", "def append(self, item: T) -> None: self.__array += array(self.__array.typecode, [item]) def count(self, item:", "return elif index < 0: index = len(self.__array) + index + 1 old_array", "self.__index -= 1 return self.__data[self.__index] __array: ArrayType def __init__(self, type_char: str, *args) ->", "return False else: for i, elem in enumerate(self.__array): if elem != other.__array[i]: return", "None: if index == -1: self.append(item) return elif index < 0: 
index =", "0 for elem in self.__array: if elem == item: count += 1 return" ]
[ "overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env, ctrl_type, ctrl_args, overrides, logdir)", "except ValueError: print(\"Please use y/n or yes/no.\\n\") if __name__ == \"__main__\": parser =", "overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\",", "as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\") print(exp.logdir) def user_prompt(question: str) -> bool: \"\"\"", "yes/no-*question* to the user. https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import strtobool while True: user_input", "parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg, args.override, args.model_dir, args.logdir, args.init_iter, args.last_iter, args.nrecord, not args.no_raw_dir,", "from __future__ import division from __future__ import print_function from __future__ import absolute_import import", "rawdir, ): ctrl_args = DotMap(**{key: val for (key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir])", "import DotMap from dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC import MPC from dmbrl.config import", "MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite = user_prompt( \"{} already exists. 
Overwrite?\".format(exp.logdir)", "import os import argparse import pprint from dotmap import DotMap from dmbrl.misc.MBExp import", "last_iter, nrecord, rawdir, ): ctrl_args = DotMap(**{key: val for (key, val) in ctrl_args})", "import print_function from __future__ import absolute_import import os import argparse import pprint from", "use y/n or yes/no.\\n\") if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env', type=str,", "== \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite = user_prompt(", "from dotmap import DotMap from dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC import MPC from", "Prompt the yes/no-*question* to the user. https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import strtobool while", "\").lower() try: result = strtobool(user_input) return result except ValueError: print(\"Please use y/n or", "-> bool: \"\"\" Prompt the yes/no-*question* to the user. https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util", "create_config(env, ctrl_type, ctrl_args, overrides, logdir) cfg.pprint() if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg)", "parser = argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[]) parser.add_argument('-o', '--override',", "cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite = user_prompt( \"{} already", "\"\"\" Prompt the yes/no-*question* to the user. 
https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import strtobool", "default=[]) parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int, default=1)", "dotmap import DotMap from dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC import MPC from dmbrl.config", "MPC from dmbrl.config import create_config def main( env, ctrl_type, ctrl_args, overrides, model_dir, logdir,", "parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[]) parser.add_argument('-o', '--override', action='append', nargs=2, default=[]) parser.add_argument('-model-dir', type=str, required=True)", "parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg, args.override, args.model_dir, args.logdir, args.init_iter,", "absolute_import import os import argparse import pprint from dotmap import DotMap from dmbrl.misc.MBExp", "dmbrl.config import create_config def main( env, ctrl_type, ctrl_args, overrides, model_dir, logdir, init_iter, last_iter,", "while True: user_input = input(question + \" [y/n]: \").lower() try: result = strtobool(user_input)", "MBExperiment from dmbrl.controllers.MPC import MPC from dmbrl.config import create_config def main( env, ctrl_type,", "ctrl_args = DotMap(**{key: val for (key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"])", "= argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[]) parser.add_argument('-o', '--override', action='append',", "\"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() 
print(\"Saved to\") print(exp.logdir) def user_prompt(question: str) ->", "logdir) cfg.pprint() if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if", "f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\") print(exp.logdir) def user_prompt(question: str) -> bool: \"\"\" Prompt the", "overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env, ctrl_type, ctrl_args,", "or yes/no.\\n\") if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca',", "import MPC from dmbrl.config import create_config def main( env, ctrl_type, ctrl_args, overrides, model_dir,", "+ \" [y/n]: \").lower() try: result = strtobool(user_input) return result except ValueError: print(\"Please", "exists. 
Overwrite?\".format(exp.logdir) ) if not overwrite: return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\")", "action='store_true') args = parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg, args.override, args.model_dir, args.logdir, args.init_iter, args.last_iter,", "exp.run_experiment() print(\"Saved to\") print(exp.logdir) def user_prompt(question: str) -> bool: \"\"\" Prompt the yes/no-*question*", "[y/n]: \").lower() try: result = strtobool(user_input) return result except ValueError: print(\"Please use y/n", "str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env, ctrl_type, ctrl_args, overrides, logdir) cfg.pprint()", "default=[]) parser.add_argument('-o', '--override', action='append', nargs=2, default=[]) parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter',", "dmbrl.controllers.MPC import MPC from dmbrl.config import create_config def main( env, ctrl_type, ctrl_args, overrides,", "overwrite: return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved", "str) -> bool: \"\"\" Prompt the yes/no-*question* to the user. https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from", "bool: \"\"\" Prompt the yes/no-*question* to the user. 
https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import", "type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[]) parser.add_argument('-o', '--override', action='append', nargs=2, default=[]) parser.add_argument('-model-dir',", "type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg, args.override, args.model_dir,", "(key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\",", "= strtobool(user_input) return result except ValueError: print(\"Please use y/n or yes/no.\\n\") if __name__", "= create_config(env, ctrl_type, ctrl_args, overrides, logdir) cfg.pprint() if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy =", "if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite", "with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\") print(exp.logdir) def user_prompt(question:", "print(\"Saved to\") print(exp.logdir) def user_prompt(question: str) -> bool: \"\"\" Prompt the yes/no-*question* to", "y/n or yes/no.\\n\") if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True)", "overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env, ctrl_type, ctrl_args, overrides, logdir) cfg.pprint() if ctrl_type ==", "nargs=2, default=[]) parser.add_argument('-o', '--override', action='append', nargs=2, default=[]) 
parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str, required=True)", "https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import strtobool while True: user_input = input(question + \"", "ctrl_args, overrides, model_dir, logdir, init_iter, last_iter, nrecord, rawdir, ): ctrl_args = DotMap(**{key: val", "__future__ import absolute_import import os import argparse import pprint from dotmap import DotMap", "yes/no.\\n\") if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg',", "ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite =", "type=int, default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args()", "else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\") print(exp.logdir)", "def user_prompt(question: str) -> bool: \"\"\" Prompt the yes/no-*question* to the user. https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input", "input(question + \" [y/n]: \").lower() try: result = strtobool(user_input) return result except ValueError:", "required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int, default=1)", "overwrite = user_prompt( \"{} already exists. 
Overwrite?\".format(exp.logdir) ) if not overwrite: return else:", "\"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\") print(exp.logdir) def user_prompt(question: str) -> bool:", "str(rawdir)]) cfg = create_config(env, ctrl_type, ctrl_args, overrides, logdir) cfg.pprint() if ctrl_type == \"MPC\":", "): ctrl_args = DotMap(**{key: val for (key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\",", "model_dir, logdir, init_iter, last_iter, nrecord, rawdir, ): ctrl_args = DotMap(**{key: val for (key,", "= MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite = user_prompt( \"{} already exists. Overwrite?\".format(exp.logdir) ) if", ") if not overwrite: return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f:", "overrides, model_dir, logdir, init_iter, last_iter, nrecord, rawdir, ): ctrl_args = DotMap(**{key: val for", "action='append', nargs=2, default=[]) parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter',", "required=True) parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[]) parser.add_argument('-o', '--override', action='append', nargs=2, default=[]) parser.add_argument('-model-dir', type=str,", "pprint from dotmap import DotMap from dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC import MPC", "overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env,", "distutils.util import strtobool 
while True: user_input = input(question + \" [y/n]: \").lower() try:", "str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env, ctrl_type, ctrl_args, overrides,", "str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env, ctrl_type, ctrl_args, overrides, logdir) cfg.pprint() if ctrl_type", "parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args() main( args.env,", "already exists. Overwrite?\".format(exp.logdir) ) if not overwrite: return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"),", "try: result = strtobool(user_input) return result except ValueError: print(\"Please use y/n or yes/no.\\n\")", "argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[]) parser.add_argument('-o', '--override', action='append', nargs=2,", "== \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[])", "default=1) parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg,", "\"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env, ctrl_type,", "strtobool(user_input) return result except ValueError: print(\"Please use y/n or yes/no.\\n\") if 
__name__ ==", "parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args =", "in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\",", "type=str, required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int,", "import absolute_import import os import argparse import pprint from dotmap import DotMap from", "val for (key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\",", "\"{} already exists. Overwrite?\".format(exp.logdir) ) if not overwrite: return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir,", "args = parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg, args.override, args.model_dir, args.logdir, args.init_iter, args.last_iter, args.nrecord,", "def main( env, ctrl_type, ctrl_args, overrides, model_dir, logdir, init_iter, last_iter, nrecord, rawdir, ):", "\" [y/n]: \").lower() try: result = strtobool(user_input) return result except ValueError: print(\"Please use", "the user. 
https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import strtobool while True: user_input = input(question", "from dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC import MPC from dmbrl.config import create_config def", "from dmbrl.config import create_config def main( env, ctrl_type, ctrl_args, overrides, model_dir, logdir, init_iter,", "parser.add_argument('-o', '--override', action='append', nargs=2, default=[]) parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int,", "cfg.pprint() if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir):", "ctrl_type, ctrl_args, overrides, model_dir, logdir, init_iter, last_iter, nrecord, rawdir, ): ctrl_args = DotMap(**{key:", "import MBExperiment from dmbrl.controllers.MPC import MPC from dmbrl.config import create_config def main( env,", "nrecord, rawdir, ): ctrl_args = DotMap(**{key: val for (key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\",", "f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\") print(exp.logdir) def user_prompt(question: str) -> bool: \"\"\" Prompt", "user_input = input(question + \" [y/n]: \").lower() try: result = strtobool(user_input) return result", "overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg = create_config(env, ctrl_type, ctrl_args, overrides, logdir) cfg.pprint() if", "print(exp.logdir) def user_prompt(question: str) -> bool: \"\"\" Prompt the yes/no-*question* to the user.", "from dmbrl.controllers.MPC import MPC from dmbrl.config import create_config def main( env, ctrl_type, ctrl_args,", "parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', 
action='append', nargs=2, default=[]) parser.add_argument('-o', '--override', action='append', nargs=2, default=[])", "nargs=2, default=[]) parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int,", "DotMap(**{key: val for (key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"])", "required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args", "True: user_input = input(question + \" [y/n]: \").lower() try: result = strtobool(user_input) return", "__name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2,", "dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC import MPC from dmbrl.config import create_config def main(", "overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\",", "from __future__ import print_function from __future__ import absolute_import import os import argparse import", "= user_prompt( \"{} already exists. 
Overwrite?\".format(exp.logdir) ) if not overwrite: return else: os.makedirs(exp.logdir)", "type=int, default=1) parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args() main( args.env, \"MPC\",", "\"\"\" from distutils.util import strtobool while True: user_input = input(question + \" [y/n]:", "parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg, args.override,", "'--ctrl_arg', action='append', nargs=2, default=[]) parser.add_argument('-o', '--override', action='append', nargs=2, default=[]) parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir',", "create_config def main( env, ctrl_type, ctrl_args, overrides, model_dir, logdir, init_iter, last_iter, nrecord, rawdir,", "\"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', action='append', nargs=2, default=[]) parser.add_argument('-o',", "DotMap from dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC import MPC from dmbrl.config import create_config", "user_prompt(question: str) -> bool: \"\"\" Prompt the yes/no-*question* to the user. 
https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\"", "strtobool while True: user_input = input(question + \" [y/n]: \").lower() try: result =", "__future__ import print_function from __future__ import absolute_import import os import argparse import pprint", "action='append', nargs=2, default=[]) parser.add_argument('-o', '--override', action='append', nargs=2, default=[]) parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str,", "if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env', type=str, required=True) parser.add_argument('-ca', '--ctrl_arg', action='append',", "default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args() main(", "to the user. https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import strtobool while True: user_input =", "parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord',", "= MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite = user_prompt( \"{} already exists.", "ctrl_args, overrides, logdir) cfg.pprint() if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp =", "\"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite = user_prompt( \"{}", "print(\"Please use y/n or yes/no.\\n\") if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-env',", "import create_config def main( env, ctrl_type, ctrl_args, overrides, model_dir, logdir, init_iter, last_iter, 
nrecord,", "return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\")", "return result except ValueError: print(\"Please use y/n or yes/no.\\n\") if __name__ == \"__main__\":", "MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite = user_prompt( \"{} already exists. Overwrite?\".format(exp.logdir) ) if not", "os.path.exists(exp.logdir): overwrite = user_prompt( \"{} already exists. Overwrite?\".format(exp.logdir) ) if not overwrite: return", "print_function from __future__ import absolute_import import os import argparse import pprint from dotmap", "import division from __future__ import print_function from __future__ import absolute_import import os import", "<reponame>vitchyr/handful-of-trials<gh_stars>1-10 from __future__ import division from __future__ import print_function from __future__ import absolute_import", "the yes/no-*question* to the user. 
https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import strtobool while True:", "overrides, logdir) cfg.pprint() if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp = MBExperiment(cfg.exp_cfg)", "default=1) parser.add_argument('-no-raw-dir', action='store_true') args = parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg, args.override, args.model_dir, args.logdir,", "for (key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"])", "os import argparse import pprint from dotmap import DotMap from dmbrl.misc.MBExp import MBExperiment", "exp = MBExperiment(cfg.exp_cfg) if os.path.exists(exp.logdir): overwrite = user_prompt( \"{} already exists. Overwrite?\".format(exp.logdir) )", "division from __future__ import print_function from __future__ import absolute_import import os import argparse", "__future__ import division from __future__ import print_function from __future__ import absolute_import import os", "if not overwrite: return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict()))", "import strtobool while True: user_input = input(question + \" [y/n]: \").lower() try: result", "parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int, default=1) parser.add_argument('-no-raw-dir',", "type=str, required=True) parser.add_argument('-init-iter', type=int, default=0) parser.add_argument('-last-iter', type=int, default=1) parser.add_argument('-nrecord', type=int, default=1) 
parser.add_argument('-no-raw-dir', action='store_true')", "ValueError: print(\"Please use y/n or yes/no.\\n\") if __name__ == \"__main__\": parser = argparse.ArgumentParser()", "from distutils.util import strtobool while True: user_input = input(question + \" [y/n]: \").lower()", "ctrl_type, ctrl_args, overrides, logdir) cfg.pprint() if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy = MPC(cfg.ctrl_cfg) exp", "user. https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input \"\"\" from distutils.util import strtobool while True: user_input = input(question +", "cfg = create_config(env, ctrl_type, ctrl_args, overrides, logdir) cfg.pprint() if ctrl_type == \"MPC\": cfg.exp_cfg.exp_cfg.policy", "\"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg =", "import argparse import pprint from dotmap import DotMap from dmbrl.misc.MBExp import MBExperiment from", "env, ctrl_type, ctrl_args, overrides, model_dir, logdir, init_iter, last_iter, nrecord, rawdir, ): ctrl_args =", "\"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)])", "'--override', action='append', nargs=2, default=[]) parser.add_argument('-model-dir', type=str, required=True) parser.add_argument('-logdir', type=str, required=True) parser.add_argument('-init-iter', type=int, default=0)", "if os.path.exists(exp.logdir): overwrite = user_prompt( \"{} already exists. 
Overwrite?\".format(exp.logdir) ) if not overwrite:", "argparse import pprint from dotmap import DotMap from dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC", "init_iter, last_iter, nrecord, rawdir, ): ctrl_args = DotMap(**{key: val for (key, val) in", "from __future__ import absolute_import import os import argparse import pprint from dotmap import", "overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)]) overrides.append([\"exp_cfg.log_cfg.rawdir\", str(rawdir)]) cfg", "not overwrite: return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment()", "import pprint from dotmap import DotMap from dmbrl.misc.MBExp import MBExperiment from dmbrl.controllers.MPC import", "= DotMap(**{key: val for (key, val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\",", "open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\") print(exp.logdir) def user_prompt(question: str)", "user_prompt( \"{} already exists. 
Overwrite?\".format(exp.logdir) ) if not overwrite: return else: os.makedirs(exp.logdir) with", "ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)])", "to\") print(exp.logdir) def user_prompt(question: str) -> bool: \"\"\" Prompt the yes/no-*question* to the", "model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)]) overrides.append([\"exp_cfg.exp_cfg.ntrain_iters\", str(last_iter)]) overrides.append([\"exp_cfg.log_cfg.nrecord\", str(nrecord)])", "Overwrite?\".format(exp.logdir) ) if not overwrite: return else: os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as", "= parser.parse_args() main( args.env, \"MPC\", args.ctrl_arg, args.override, args.model_dir, args.logdir, args.init_iter, args.last_iter, args.nrecord, not", "val) in ctrl_args}) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.model_dir\", model_dir]) overrides.append([\"ctrl_cfg.prop_cfg.model_init_cfg.load_model\", \"True\"]) overrides.append([\"ctrl_cfg.prop_cfg.model_pretrained\", \"True\"]) overrides.append([\"exp_cfg.exp_cfg.ninit_rollouts\", \"0\"]) overrides.append([\"exp_cfg.exp_cfg.init_iter\", str(init_iter)])", "main( env, ctrl_type, ctrl_args, overrides, model_dir, logdir, init_iter, last_iter, nrecord, rawdir, ): ctrl_args", "result except ValueError: print(\"Please use y/n or yes/no.\\n\") if __name__ == \"__main__\": parser", "main( args.env, 
\"MPC\", args.ctrl_arg, args.override, args.model_dir, args.logdir, args.init_iter, args.last_iter, args.nrecord, not args.no_raw_dir, )", "logdir, init_iter, last_iter, nrecord, rawdir, ): ctrl_args = DotMap(**{key: val for (key, val)", "os.makedirs(exp.logdir) with open(os.path.join(exp.logdir, \"config.txt\"), \"w\") as f: f.write(pprint.pformat(cfg.toDict())) exp.run_experiment() print(\"Saved to\") print(exp.logdir) def", "= input(question + \" [y/n]: \").lower() try: result = strtobool(user_input) return result except", "result = strtobool(user_input) return result except ValueError: print(\"Please use y/n or yes/no.\\n\") if" ]
[ "is the CrossEntropy Loss and Lloc is the SmoothL1 Loss weighted by α", "matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf for more details. \"\"\" def __init__(self, num_classes, overlap_thresh,", "in targets] num = loc_data.size(0) num_priors = priors.size(0) # match priors (default boxes)", "obj_data.view(-1, 2) # [num*num_priors, 2] logit_0 = batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True))", "confidences, l: predicted boxes, g: ground truth boxes N: number of matched default", "boxes and labels for a batch, shape: [batch_size,num_objs,5] (last idx is the label).", "for anno in targets] num = loc_data.size(0) num_priors = priors.size(0) # match priors", "Loss Function Compute Targets: 1) Produce Confidence Target Indices by matching ground truth", "filter the excessive number of negative examples that comes with using a large", "3) Hard negative mining to filter the excessive number of negative examples that", "across batch for hard negative mining with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1),", "Examples mask = pos | neg weight = conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask],", "loc_data, conf_data, obj_data = predictions device = loc_data.device targets = [anno.to(device) for anno", "Localization Loss (Smooth L1) # Shape: [batch,num_priors,4] loc_p = loc_data[pos] loc_t = loc_t[pos]", "the excessive number of negative examples that comes with using a large number", "self).__init__() self.num_classes = num_classes self.threshold = overlap_thresh self.background_label = bkg_label self.encode_target = encode_target", "[num*num_priors, 2] logit_0 = batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:,", "encode_target self.use_prior_for_matching = prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio = neg_pos self.neg_overlap = neg_overlap", 
"torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight) # Confidence Loss (cosine distance to classes center)", "(cosine distance to classes center) # pos [num, num_priors] # conf_data [num, num_priors,", ":, 1] * pos.float()).sum(1, keepdim=True).long() # Localization Loss (Smooth L1) # Shape: [batch,num_priors,4]", "F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') # Hard Negative Mining loss_obj[obj_t.view(-1)] = 0 # filter", "[obj_num] defaults = priors.data # [num_priors,4] match(self.threshold, truths, defaults, self.variance, labels, loc_t, conf_t,", "threshold parameter (default threshold: 0.5). 2) Produce localization target by 'encoding' variance into", "is the SmoothL1 Loss weighted by α which is set to 1 by", "and Negative Examples mask = pos | neg weight = conf_t[mask][:, 1] loss_obj", "Loss Args: predictions (tuple): A tuple containing loc preds, conf preds, and prior", "prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio = neg_pos self.neg_overlap = neg_overlap self.variance = [0.1,", "ground_truth (tensor): Ground truth boxes and labels for a batch, shape: [batch_size,num_objs,5] (last", "offsets of ground truth boxes and their matched 'priorboxes'. 3) Hard negative mining", "conf preds, and prior boxes from SSD net. conf shape: torch.size(batch_size,num_priors,num_classes) loc shape:", "[num, num_priors] # conf_data [num, num_priors, feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1) # Compute", "the CrossEntropy Loss and Lloc is the SmoothL1 Loss weighted by α which", "* weight) N = num_pos.sum() loss_l /= N loss_c /= N loss_obj /=", "0.5). 
2) Produce localization target by 'encoding' variance into offsets of ground truth", "num_priors] num_pos = (conf_t[:, :, 1] * pos.float()).sum(1, keepdim=True).long() # Localization Loss (Smooth", "conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l, dim=1) * weight_pos) # Compute object loss across", "ground truth boxes with (default) 'priorboxes' that have jaccard index > threshold parameter", "encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes self.threshold = overlap_thresh self.background_label = bkg_label self.encode_target", "# filter out pos boxes (label>0) and ignored boxes (label=-1) for now loss_obj", "num_priors] # Object Loss Including Positive and Negative Examples mask = pos |", "negative mining (logit-combined) batch_obj = obj_data.view(-1, 2) # [num*num_priors, 2] logit_0 = batch_obj[:,", "torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit = torch.cat((logit_0, logit_k), 1)", "= prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio = neg_pos self.neg_overlap = neg_overlap self.variance =", "Compute Targets: 1) Produce Confidence Target Indices by matching ground truth boxes with", "self.background_label = bkg_label self.encode_target = encode_target self.use_prior_for_matching = prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio", "2) # [num*num_priors, 2] logit_0 = batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k", "* pos.float()).sum(1, keepdim=True).long() # Localization Loss (Smooth L1) # Shape: [batch,num_priors,4] loc_p =", "range(num): # batch_size truths = targets[idx][:, :-2].data # [obj_num, 4] labels = targets[idx][:,", "with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') # Hard Negative Mining loss_obj[obj_t.view(-1)]", "labels for a batch, shape: [batch_size,num_objs,5] (last 
idx is the label). \"\"\" #", "(Smooth L1) # Shape: [batch,num_priors,4] loc_p = loc_data[pos] loc_t = loc_t[pos] loss_l =", "mining to filter the excessive number of negative examples that comes with using", "truths = targets[idx][:, :-2].data # [obj_num, 4] labels = targets[idx][:, -2:].data # [obj_num]", "and ground truth boxes loc_t = torch.Tensor(num, num_priors, 4).to(device) conf_t = torch.Tensor(num, num_priors,", "num_priors = priors.size(0) # match priors (default boxes) and ground truth boxes loc_t", "and ignored boxes (label=-1) for now loss_obj = loss_obj.view(num, -1) _, loss_idx =", "-1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') * weight) N = num_pos.sum()", "batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit = torch.cat((logit_0, logit_k), 1) # Confidence Loss Including", "self.threshold = overlap_thresh self.background_label = bkg_label self.encode_target = encode_target self.use_prior_for_matching = prior_for_matching self.do_neg_mining", "predictions (tuple): A tuple containing loc preds, conf preds, and prior boxes from", "-1) _, loss_idx = loss_obj.sort(1, descending=True) _, idx_rank = loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio", "Confidence Loss (cosine distance to classes center) # pos [num, num_priors] # conf_data", "feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1) # Compute max conf across batch for hard", "= loc_data[pos] loc_t = loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos = conf_t[pos][:,", "Confidence Loss Including Positive and Negative Examples logit = logit.view(num, -1, self.num_classes) loss_c", "\"\"\" # loc_data[batch_size, num_priors, 4] # conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size, num_priors, 2]", "Loss Including Positive and Negative Examples logit = logit.view(num, -1, self.num_classes) loss_c =", "- 1) neg = idx_rank < num_neg.expand_as(idx_rank) 
# [num, num_priors] # Object Loss", "torch.Tensor(num, num_priors, 4).to(device) conf_t = torch.Tensor(num, num_priors, 2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device) #", "index > threshold parameter (default threshold: 0.5). 2) Produce localization target by 'encoding'", "a large number of default bounding boxes. (default negative:positive ratio 3:1) Objective Loss:", "parameter (default threshold: 0.5). 2) Produce localization target by 'encoding' variance into offsets", "MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function Compute Targets: 1) Produce Confidence Target Indices by", "# [obj_num] defaults = priors.data # [num_priors,4] match(self.threshold, truths, defaults, self.variance, labels, loc_t,", "batch, shape: [batch_size,num_objs,5] (last idx is the label). \"\"\" # loc_data[batch_size, num_priors, 4]", "= loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos = conf_t[pos][:, 1] loss_l =", "class confidences, l: predicted boxes, g: ground truth boxes N: number of matched", "\"\"\"SSD Weighted Loss Function Compute Targets: 1) Produce Confidence Target Indices by matching", "c) + αLloc(x,l,g)) / N Where, Lconf is the CrossEntropy Loss and Lloc", "now loss_obj = loss_obj.view(num, -1) _, loss_idx = loss_obj.sort(1, descending=True) _, idx_rank =", "= priors.size(0) # match priors (default boxes) and ground truth boxes loc_t =", "# conf_data [num, num_priors, feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1) # Compute max conf", "= num_classes self.threshold = overlap_thresh self.background_label = bkg_label self.encode_target = encode_target self.use_prior_for_matching =", "tuple containing loc preds, conf preds, and prior boxes from SSD net. 
conf", "_, loss_idx = loss_obj.sort(1, descending=True) _, idx_rank = loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio *", "for hard negative mining (logit-combined) batch_obj = obj_data.view(-1, 2) # [num*num_priors, 2] logit_0", "conf_data [num, num_priors, feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1) # Compute max conf across", "self.do_neg_mining = neg_mining self.negpos_ratio = neg_pos self.neg_overlap = neg_overlap self.variance = [0.1, 0.2]", "label). \"\"\" # loc_data[batch_size, num_priors, 4] # conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size, num_priors,", "_, idx_rank = loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio * num_pos, max=num_priors - 1) neg", "-2:].data # [obj_num] defaults = priors.data # [num_priors,4] match(self.threshold, truths, defaults, self.variance, labels,", "for hard negative mining with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') #", "torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes and", "= [0.1, 0.2] def forward(self, predictions, priors, targets): \"\"\"Multibox Loss Args: predictions (tuple):", "conf_t, obj_t, idx) pos = (conf_t[:, :, 0] > 0).bool() # [num, num_priors]", "import torch import torch.nn as nn import torch.nn.functional as F from utils.box_utils import", "loss_l = torch.sum(torch.sum(loss_l, dim=1) * weight_pos) # Compute object loss across batch for", "= loss_obj.sort(1, descending=True) _, idx_rank = loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio * num_pos, max=num_priors", "the SmoothL1 Loss weighted by α which is set to 1 by cross", "0).bool() # [num, num_priors] num_pos = (conf_t[:, :, 1] * pos.float()).sum(1, keepdim=True).long() #", "torch import torch.nn as nn import torch.nn.functional as F from utils.box_utils import match", "= targets[idx][:, -2:].data 
# [obj_num] defaults = priors.data # [num_priors,4] match(self.threshold, truths, defaults,", "boxes) and ground truth boxes loc_t = torch.Tensor(num, num_priors, 4).to(device) conf_t = torch.Tensor(num,", "Including Positive and Negative Examples mask = pos | neg weight = conf_t[mask][:,", "= conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight) # Confidence Loss", "Hard Negative Mining loss_obj[obj_t.view(-1)] = 0 # filter out pos boxes (label>0) and", "cross val. Args: c: class confidences, l: predicted boxes, g: ground truth boxes", "reduction='none') * weight) # Confidence Loss (cosine distance to classes center) # pos", "https://arxiv.org/pdf/1512.02325.pdf for more details. \"\"\" def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos,", "Where, Lconf is the CrossEntropy Loss and Lloc is the SmoothL1 Loss weighted", "# pos [num, num_priors] # conf_data [num, num_priors, feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1)", "Function Compute Targets: 1) Produce Confidence Target Indices by matching ground truth boxes", "[num, num_priors] num_pos = (conf_t[:, :, 1] * pos.float()).sum(1, keepdim=True).long() # Localization Loss", "that comes with using a large number of default bounding boxes. (default negative:positive", "reduction='none') weight_pos = conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l, dim=1) * weight_pos) # Compute", "Shape: [batch,num_priors,4] loc_p = loc_data[pos] loc_t = loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none')", "match priors (default boxes) and ground truth boxes loc_t = torch.Tensor(num, num_priors, 4).to(device)", "match priors with gt for idx in range(num): # batch_size truths = targets[idx][:,", "(default) 'priorboxes' that have jaccard index > threshold parameter (default threshold: 0.5). 
2)", "utils.box_utils import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function Compute Targets: 1) Produce", "Produce Confidence Target Indices by matching ground truth boxes with (default) 'priorboxes' that", "torch.nn.functional as F from utils.box_utils import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function", "\"\"\" def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__()", "# Object Loss Including Positive and Negative Examples mask = pos | neg", "= loc_data.size(0) num_priors = priors.size(0) # match priors (default boxes) and ground truth", "= (conf_t[:, :, 1] * pos.float()).sum(1, keepdim=True).long() # Localization Loss (Smooth L1) #", "(default negative:positive ratio 3:1) Objective Loss: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) /", "ground truth boxes loc_t = torch.Tensor(num, num_priors, 4).to(device) conf_t = torch.Tensor(num, num_priors, 2).to(device)", "l: predicted boxes, g: ground truth boxes N: number of matched default boxes", "num_neg.expand_as(idx_rank) # [num, num_priors] # Object Loss Including Positive and Negative Examples mask", "by matching ground truth boxes with (default) 'priorboxes' that have jaccard index >", "loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos = conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l,", "# match priors (default boxes) and ground truth boxes loc_t = torch.Tensor(num, num_priors,", "import torch.nn as nn import torch.nn.functional as F from utils.box_utils import match class", "# Localization Loss (Smooth L1) # Shape: [batch,num_priors,4] loc_p = loc_data[pos] loc_t =", "gt for idx in range(num): # batch_size truths = targets[idx][:, :-2].data # [obj_num,", "num_priors, 2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device) # match priors with gt for idx", "comes with using a large number 
of default bounding boxes. (default negative:positive ratio", "device = loc_data.device targets = [anno.to(device) for anno in targets] num = loc_data.size(0)", "batch_obj = obj_data.view(-1, 2) # [num*num_priors, 2] logit_0 = batch_obj[:, 0].unsqueeze(1) + torch.log(", "# Hard Negative Mining loss_obj[obj_t.view(-1)] = 0 # filter out pos boxes (label>0)", "into offsets of ground truth boxes and their matched 'priorboxes'. 3) Hard negative", "'encoding' variance into offsets of ground truth boxes and their matched 'priorboxes'. 3)", "batch_size truths = targets[idx][:, :-2].data # [obj_num, 4] labels = targets[idx][:, -2:].data #", "loss across batch for hard negative mining with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2),", "4).to(device) conf_t = torch.Tensor(num, num_priors, 2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device) # match priors", "mining with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') # Hard Negative Mining", "loc_t = torch.Tensor(num, num_priors, 4).to(device) conf_t = torch.Tensor(num, num_priors, 2).to(device) obj_t = torch.BoolTensor(num,", "val. 
Args: c: class confidences, l: predicted boxes, g: ground truth boxes N:", "0] > 0).bool() # [num, num_priors] num_pos = (conf_t[:, :, 1] * pos.float()).sum(1,", "neg_mining self.negpos_ratio = neg_pos self.neg_overlap = neg_overlap self.variance = [0.1, 0.2] def forward(self,", "loc_t, reduction='none') weight_pos = conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l, dim=1) * weight_pos) #", "num_priors, 4).to(device) conf_t = torch.Tensor(num, num_priors, 2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device) # match", "weight = conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight) # Confidence", "torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit = torch.cat((logit_0, logit_k),", "predictions device = loc_data.device targets = [anno.to(device) for anno in targets] num =", "# Confidence Loss (cosine distance to classes center) # pos [num, num_priors] #", "negative mining to filter the excessive number of negative examples that comes with", "logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit = torch.cat((logit_0, logit_k), 1) # Confidence", "# Confidence Loss Including Positive and Negative Examples logit = logit.view(num, -1, self.num_classes)", "pos = (conf_t[:, :, 0] > 0).bool() # [num, num_priors] num_pos = (conf_t[:,", "N Where, Lconf is the CrossEntropy Loss and Lloc is the SmoothL1 Loss", "1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight) # Confidence Loss (cosine distance", "target by 'encoding' variance into offsets of ground truth boxes and their matched", "Negative Examples logit = logit.view(num, -1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none')", "targets[idx][:, -2:].data # [obj_num] defaults = priors.data # [num_priors,4] 
match(self.threshold, truths, defaults, self.variance,", "# loc_data[batch_size, num_priors, 4] # conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size, num_priors, 2] loc_data,", "loc_p = loc_data[pos] loc_t = loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos =", "neg = idx_rank < num_neg.expand_as(idx_rank) # [num, num_priors] # Object Loss Including Positive", "αLloc(x,l,g)) / N Where, Lconf is the CrossEntropy Loss and Lloc is the", "loss_obj = loss_obj.view(num, -1) _, loss_idx = loss_obj.sort(1, descending=True) _, idx_rank = loss_idx.sort(1)", "= bkg_label self.encode_target = encode_target self.use_prior_for_matching = prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio =", "batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit", "ratio 3:1) Objective Loss: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N Where,", "preds, conf preds, and prior boxes from SSD net. conf shape: torch.size(batch_size,num_priors,num_classes) loc", "N: number of matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf for more details. \"\"\" def", "# obj_data[batch_size, num_priors, 2] loc_data, conf_data, obj_data = predictions device = loc_data.device targets", "1) neg = idx_rank < num_neg.expand_as(idx_rank) # [num, num_priors] # Object Loss Including", "and prior boxes from SSD net. conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors", "labels = targets[idx][:, -2:].data # [obj_num] defaults = priors.data # [num_priors,4] match(self.threshold, truths,", "boxes with (default) 'priorboxes' that have jaccard index > threshold parameter (default threshold:", "from SSD net. 
conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth", "[obj_num, 4] labels = targets[idx][:, -2:].data # [obj_num] defaults = priors.data # [num_priors,4]", "batch for hard negative mining (logit-combined) batch_obj = obj_data.view(-1, 2) # [num*num_priors, 2]", "across batch for hard negative mining (logit-combined) batch_obj = obj_data.view(-1, 2) # [num*num_priors,", "> threshold parameter (default threshold: 0.5). 2) Produce localization target by 'encoding' variance", "for now loss_obj = loss_obj.view(num, -1) _, loss_idx = loss_obj.sort(1, descending=True) _, idx_rank", "loc_data[pos] loc_t = loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos = conf_t[pos][:, 1]", "torch.BoolTensor(num, num_priors).to(device) # match priors with gt for idx in range(num): # batch_size", "max conf across batch for hard negative mining (logit-combined) batch_obj = obj_data.view(-1, 2)", "boxes (label>0) and ignored boxes (label=-1) for now loss_obj = loss_obj.view(num, -1) _,", "object loss across batch for hard negative mining with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1,", "dim=1) * weight_pos) # Compute object loss across batch for hard negative mining", "default bounding boxes. (default negative:positive ratio 3:1) Objective Loss: L(x,c,l,g) = (Lconf(x, c)", "num_classes self.threshold = overlap_thresh self.background_label = bkg_label self.encode_target = encode_target self.use_prior_for_matching = prior_for_matching", "[num_priors,4] match(self.threshold, truths, defaults, self.variance, labels, loc_t, conf_t, obj_t, idx) pos = (conf_t[:,", "defaults, self.variance, labels, loc_t, conf_t, obj_t, idx) pos = (conf_t[:, :, 0] >", "and their matched 'priorboxes'. 
3) Hard negative mining to filter the excessive number", "neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes self.threshold = overlap_thresh self.background_label", "num_priors] # conf_data [num, num_priors, feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1) # Compute max", "# [num*num_priors, 2] logit_0 = batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k =", "reduction='none') # Hard Negative Mining loss_obj[obj_t.view(-1)] = 0 # filter out pos boxes", "boxes N: number of matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf for more details. \"\"\"", "= priors.data # [num_priors,4] match(self.threshold, truths, defaults, self.variance, labels, loc_t, conf_t, obj_t, idx)", "shape: [batch_size,num_objs,5] (last idx is the label). \"\"\" # loc_data[batch_size, num_priors, 4] #", "# conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size, num_priors, 2] loc_data, conf_data, obj_data = predictions", "= obj_data.view(-1, 2) # [num*num_priors, 2] logit_0 = batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1,", "predicted boxes, g: ground truth boxes N: number of matched default boxes See:", "(Lconf(x, c) + αLloc(x,l,g)) / N Where, Lconf is the CrossEntropy Loss and", "2), obj_t.long().view(-1), reduction='none') # Hard Negative Mining loss_obj[obj_t.view(-1)] = 0 # filter out", "= idx_rank < num_neg.expand_as(idx_rank) # [num, num_priors] # Object Loss Including Positive and", "by 'encoding' variance into offsets of ground truth boxes and their matched 'priorboxes'.", "# Compute object loss across batch for hard negative mining with torch.no_grad(): loss_obj", "negative mining with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') # Hard Negative", "shape: torch.size(batch_size,num_priors,num_classes) loc shape: 
torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes", "__init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes =", "conf_data.view(-1, self.num_classes-1) # Compute max conf across batch for hard negative mining (logit-combined)", "truth boxes and their matched 'priorboxes'. 3) Hard negative mining to filter the", "Produce localization target by 'encoding' variance into offsets of ground truth boxes and", "num_priors).to(device) # match priors with gt for idx in range(num): # batch_size truths", "c: class confidences, l: predicted boxes, g: ground truth boxes N: number of", "keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit = torch.cat((logit_0, logit_k), 1) #", "= torch.sum(torch.sum(loss_l, dim=1) * weight_pos) # Compute object loss across batch for hard", "mask = pos | neg weight = conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(),", "idx_rank < num_neg.expand_as(idx_rank) # [num, num_priors] # Object Loss Including Positive and Negative", "of negative examples that comes with using a large number of default bounding", "descending=True) _, idx_rank = loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio * num_pos, max=num_priors - 1)", "= (conf_t[:, :, 0] > 0).bool() # [num, num_priors] num_pos = (conf_t[:, :,", "3:1) Objective Loss: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N Where, Lconf", "for more details. 
\"\"\" def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap,", "ignored boxes (label=-1) for now loss_obj = loss_obj.view(num, -1) _, loss_idx = loss_obj.sort(1,", "= torch.cat((logit_0, logit_k), 1) # Confidence Loss Including Positive and Negative Examples logit", "obj_data = predictions device = loc_data.device targets = [anno.to(device) for anno in targets]", "hard negative mining with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') # Hard", "= conf_data.view(-1, self.num_classes-1) # Compute max conf across batch for hard negative mining", "num_pos, max=num_priors - 1) neg = idx_rank < num_neg.expand_as(idx_rank) # [num, num_priors] #", "0 # filter out pos boxes (label>0) and ignored boxes (label=-1) for now", "See: https://arxiv.org/pdf/1512.02325.pdf for more details. \"\"\" def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining,", "obj_t.long().view(-1), reduction='none') # Hard Negative Mining loss_obj[obj_t.view(-1)] = 0 # filter out pos", "as nn import torch.nn.functional as F from utils.box_utils import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD", "weight_pos) # Compute object loss across batch for hard negative mining with torch.no_grad():", "conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight) # Confidence Loss (cosine", "* num_pos, max=num_priors - 1) neg = idx_rank < num_neg.expand_as(idx_rank) # [num, num_priors]", "= [anno.to(device) for anno in targets] num = loc_data.size(0) num_priors = priors.size(0) #", "L1) # Shape: [batch,num_priors,4] loc_p = loc_data[pos] loc_t = loc_t[pos] loss_l = F.smooth_l1_loss(loc_p,", "Loss (cosine distance to classes center) # pos [num, num_priors] # conf_data [num,", "targets[idx][:, :-2].data # [obj_num, 4] labels = targets[idx][:, -2:].data # [obj_num] defaults =", "number of 
negative examples that comes with using a large number of default", "Positive and Negative Examples logit = logit.view(num, -1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:,", "= neg_mining self.negpos_ratio = neg_pos self.neg_overlap = neg_overlap self.variance = [0.1, 0.2] def", "examples that comes with using a large number of default bounding boxes. (default", "= torch.Tensor(num, num_priors, 4).to(device) conf_t = torch.Tensor(num, num_priors, 2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device)", "targets = [anno.to(device) for anno in targets] num = loc_data.size(0) num_priors = priors.size(0)", "[num, num_priors, feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1) # Compute max conf across batch", "loc_data[batch_size, num_priors, 4] # conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size, num_priors, 2] loc_data, conf_data,", "Loss and Lloc is the SmoothL1 Loss weighted by α which is set", "Lconf is the CrossEntropy Loss and Lloc is the SmoothL1 Loss weighted by", "= torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') * weight) N = num_pos.sum() loss_l /= N", "num_pos.sum() loss_l /= N loss_c /= N loss_obj /= N return {'loss_box_reg': loss_l,", "targets): \"\"\"Multibox Loss Args: predictions (tuple): A tuple containing loc preds, conf preds,", "loss_obj[obj_t.view(-1)] = 0 # filter out pos boxes (label>0) and ignored boxes (label=-1)", "N loss_c /= N loss_obj /= N return {'loss_box_reg': loss_l, 'loss_cls': loss_c, 'loss_obj':", "self.variance = [0.1, 0.2] def forward(self, predictions, priors, targets): \"\"\"Multibox Loss Args: predictions", "is set to 1 by cross val. 
Args: c: class confidences, l: predicted", "loss_obj.sort(1, descending=True) _, idx_rank = loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio * num_pos, max=num_priors -", "Loss: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N Where, Lconf is the", "Weighted Loss Function Compute Targets: 1) Produce Confidence Target Indices by matching ground", "reduction='none') * weight) N = num_pos.sum() loss_l /= N loss_c /= N loss_obj", "loc shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes and labels", "self.variance, labels, loc_t, conf_t, obj_t, idx) pos = (conf_t[:, :, 0] > 0).bool()", "weighted by α which is set to 1 by cross val. Args: c:", "to 1 by cross val. Args: c: class confidences, l: predicted boxes, g:", "= neg_overlap self.variance = [0.1, 0.2] def forward(self, predictions, priors, targets): \"\"\"Multibox Loss", "# [num, num_priors] num_pos = (conf_t[:, :, 1] * pos.float()).sum(1, keepdim=True).long() # Localization", "for a batch, shape: [batch_size,num_objs,5] (last idx is the label). \"\"\" # loc_data[batch_size,", "Examples logit = logit.view(num, -1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') *", "truth boxes N: number of matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf for more details.", "pos.float()).sum(1, keepdim=True).long() # Localization Loss (Smooth L1) # Shape: [batch,num_priors,4] loc_p = loc_data[pos]", "Targets: 1) Produce Confidence Target Indices by matching ground truth boxes with (default)", "neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes self.threshold = overlap_thresh self.background_label =", "num_pos = (conf_t[:, :, 1] * pos.float()).sum(1, keepdim=True).long() # Localization Loss (Smooth L1)", "number of matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf for more details. 
\"\"\" def __init__(self,", "conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size, num_priors, 2] loc_data, conf_data, obj_data = predictions device", "F from utils.box_utils import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function Compute Targets:", "self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') * weight) N = num_pos.sum() loss_l", "of matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf for more details. \"\"\" def __init__(self, num_classes,", ":, 0] > 0).bool() # [num, num_priors] num_pos = (conf_t[:, :, 1] *", "conf across batch for hard negative mining (logit-combined) batch_obj = obj_data.view(-1, 2) #", "Args: predictions (tuple): A tuple containing loc preds, conf preds, and prior boxes", "2] loc_data, conf_data, obj_data = predictions device = loc_data.device targets = [anno.to(device) for", "obj_data[batch_size, num_priors, 2] loc_data, conf_data, obj_data = predictions device = loc_data.device targets =", "loss_c /= N loss_obj /= N return {'loss_box_reg': loss_l, 'loss_cls': loss_c, 'loss_obj': loss_obj}", "[anno.to(device) for anno in targets] num = loc_data.size(0) num_priors = priors.size(0) # match", "neg weight = conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight) #", "pos boxes (label>0) and ignored boxes (label=-1) for now loss_obj = loss_obj.view(num, -1)", "matching ground truth boxes with (default) 'priorboxes' that have jaccard index > threshold", "out pos boxes (label>0) and ignored boxes (label=-1) for now loss_obj = loss_obj.view(num,", "self.neg_overlap = neg_overlap self.variance = [0.1, 0.2] def forward(self, predictions, priors, targets): \"\"\"Multibox", "bkg_label self.encode_target = encode_target self.use_prior_for_matching = prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio = neg_pos", "self.encode_target = encode_target 
self.use_prior_for_matching = prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio = neg_pos self.neg_overlap", "a batch, shape: [batch_size,num_objs,5] (last idx is the label). \"\"\" # loc_data[batch_size, num_priors,", "neg_overlap self.variance = [0.1, 0.2] def forward(self, predictions, priors, targets): \"\"\"Multibox Loss Args:", "priors, targets): \"\"\"Multibox Loss Args: predictions (tuple): A tuple containing loc preds, conf", "priors (default boxes) and ground truth boxes loc_t = torch.Tensor(num, num_priors, 4).to(device) conf_t", "that have jaccard index > threshold parameter (default threshold: 0.5). 2) Produce localization", "SSD net. conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor):", "# Shape: [batch,num_priors,4] loc_p = loc_data[pos] loc_t = loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t,", "torch.cat((logit_0, logit_k), 1) # Confidence Loss Including Positive and Negative Examples logit =", "= loss_obj.view(num, -1) _, loss_idx = loss_obj.sort(1, descending=True) _, idx_rank = loss_idx.sort(1) num_neg", "torch.nn as nn import torch.nn.functional as F from utils.box_utils import match class MultiBoxLoss_combined(nn.Module):", "torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes and labels for a batch, shape: [batch_size,num_objs,5]", "self.negpos_ratio = neg_pos self.neg_overlap = neg_overlap self.variance = [0.1, 0.2] def forward(self, predictions,", "L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N Where, Lconf is the CrossEntropy", "conf_data, obj_data = predictions device = loc_data.device targets = [anno.to(device) for anno in", "idx_rank = loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio * num_pos, max=num_priors - 1) neg =", "default boxes See: https://arxiv.org/pdf/1512.02325.pdf for more details. 
\"\"\" def __init__(self, num_classes, overlap_thresh, prior_for_matching,", "defaults = priors.data # [num_priors,4] match(self.threshold, truths, defaults, self.variance, labels, loc_t, conf_t, obj_t,", "= 0 # filter out pos boxes (label>0) and ignored boxes (label=-1) for", "loss_obj.view(num, -1) _, loss_idx = loss_obj.sort(1, descending=True) _, idx_rank = loss_idx.sort(1) num_neg =", "= (Lconf(x, c) + αLloc(x,l,g)) / N Where, Lconf is the CrossEntropy Loss", "= num_pos.sum() loss_l /= N loss_c /= N loss_obj /= N return {'loss_box_reg':", "= encode_target self.use_prior_for_matching = prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio = neg_pos self.neg_overlap =", "mining (logit-combined) batch_obj = obj_data.view(-1, 2) # [num*num_priors, 2] logit_0 = batch_obj[:, 0].unsqueeze(1)", "num_priors, 4] # conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size, num_priors, 2] loc_data, conf_data, obj_data", "num = loc_data.size(0) num_priors = priors.size(0) # match priors (default boxes) and ground", "[0.1, 0.2] def forward(self, predictions, priors, targets): \"\"\"Multibox Loss Args: predictions (tuple): A", "α which is set to 1 by cross val. Args: c: class confidences,", "keepdim=True).long() # Localization Loss (Smooth L1) # Shape: [batch,num_priors,4] loc_p = loc_data[pos] loc_t", "loss_idx = loss_obj.sort(1, descending=True) _, idx_rank = loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio * num_pos,", "variance into offsets of ground truth boxes and their matched 'priorboxes'. 3) Hard", "Loss weighted by α which is set to 1 by cross val. Args:", "def forward(self, predictions, priors, targets): \"\"\"Multibox Loss Args: predictions (tuple): A tuple containing", "logit = logit.view(num, -1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') * weight)", "Ground truth boxes and labels for a batch, shape: [batch_size,num_objs,5] (last idx is", "is the label). 
\"\"\" # loc_data[batch_size, num_priors, 4] # conf_data[batch_size, num_priors, num_classes] #", "priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes and labels for a batch,", "1] loss_l = torch.sum(torch.sum(loss_l, dim=1) * weight_pos) # Compute object loss across batch", "torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') # Hard Negative Mining loss_obj[obj_t.view(-1)] =", "predictions, priors, targets): \"\"\"Multibox Loss Args: predictions (tuple): A tuple containing loc preds,", "shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes and labels for a batch, shape:", "obj_t = torch.BoolTensor(num, num_priors).to(device) # match priors with gt for idx in range(num):", "matched 'priorboxes'. 3) Hard negative mining to filter the excessive number of negative", "Hard negative mining to filter the excessive number of negative examples that comes", "nn import torch.nn.functional as F from utils.box_utils import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted", "= batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf", "Confidence Target Indices by matching ground truth boxes with (default) 'priorboxes' that have", "negative examples that comes with using a large number of default bounding boxes.", "# Compute max conf across batch for hard negative mining (logit-combined) batch_obj =", "CrossEntropy Loss and Lloc is the SmoothL1 Loss weighted by α which is", "Compute max conf across batch for hard negative mining (logit-combined) batch_obj = obj_data.view(-1,", "Including Positive and Negative Examples logit = logit.view(num, -1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask],", "self.num_classes = num_classes self.threshold = overlap_thresh self.background_label = bkg_label self.encode_target = encode_target 
self.use_prior_for_matching", "filter out pos boxes (label>0) and ignored boxes (label=-1) for now loss_obj =", "Object Loss Including Positive and Negative Examples mask = pos | neg weight", "boxes. (default negative:positive ratio 3:1) Objective Loss: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g))", "boxes, g: ground truth boxes N: number of matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf", "with (default) 'priorboxes' that have jaccard index > threshold parameter (default threshold: 0.5).", "Indices by matching ground truth boxes with (default) 'priorboxes' that have jaccard index", "= F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos = conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l, dim=1) *", "4] labels = targets[idx][:, -2:].data # [obj_num] defaults = priors.data # [num_priors,4] match(self.threshold,", "conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth", "= torch.Tensor(num, num_priors, 2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device) # match priors with gt", "[num, num_priors] # Object Loss Including Positive and Negative Examples mask = pos", "truth boxes loc_t = torch.Tensor(num, num_priors, 4).to(device) conf_t = torch.Tensor(num, num_priors, 2).to(device) obj_t", "loc preds, conf preds, and prior boxes from SSD net. 
conf shape: torch.size(batch_size,num_priors,num_classes)", "hard negative mining (logit-combined) batch_obj = obj_data.view(-1, 2) # [num*num_priors, 2] logit_0 =", "[batch,num_priors,4] loc_p = loc_data[pos] loc_t = loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos", "Loss Including Positive and Negative Examples mask = pos | neg weight =", ":-2].data # [obj_num, 4] labels = targets[idx][:, -2:].data # [obj_num] defaults = priors.data", "negative:positive ratio 3:1) Objective Loss: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N", "priors.data # [num_priors,4] match(self.threshold, truths, defaults, self.variance, labels, loc_t, conf_t, obj_t, idx) pos", "2) Produce localization target by 'encoding' variance into offsets of ground truth boxes", "overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes self.threshold", "# [num_priors,4] match(self.threshold, truths, defaults, self.variance, labels, loc_t, conf_t, obj_t, idx) pos =", "import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function Compute Targets: 1) Produce Confidence", "Loss (Smooth L1) # Shape: [batch,num_priors,4] loc_p = loc_data[pos] loc_t = loc_t[pos] loss_l", "1) Produce Confidence Target Indices by matching ground truth boxes with (default) 'priorboxes'", "weight) N = num_pos.sum() loss_l /= N loss_c /= N loss_obj /= N", "Args: c: class confidences, l: predicted boxes, g: ground truth boxes N: number", "ground truth boxes N: number of matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf for more", "= loc_data.device targets = [anno.to(device) for anno in targets] num = loc_data.size(0) num_priors", "/= N loss_c /= N loss_obj /= N return {'loss_box_reg': loss_l, 'loss_cls': loss_c,", "F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos = conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l, dim=1) * 
weight_pos)", "set to 1 by cross val. Args: c: class confidences, l: predicted boxes,", "batch_conf = conf_data.view(-1, self.num_classes-1) # Compute max conf across batch for hard negative", "boxes from SSD net. conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4)", "= F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') # Hard Negative Mining loss_obj[obj_t.view(-1)] = 0 #", "torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes and labels for a", "Lloc is the SmoothL1 Loss weighted by α which is set to 1", "threshold: 0.5). 2) Produce localization target by 'encoding' variance into offsets of ground", "= predictions device = loc_data.device targets = [anno.to(device) for anno in targets] num", "num_priors, 2] loc_data, conf_data, obj_data = predictions device = loc_data.device targets = [anno.to(device)", "num_neg = torch.clamp(self.negpos_ratio * num_pos, max=num_priors - 1) neg = idx_rank < num_neg.expand_as(idx_rank)", "pos [num, num_priors] # conf_data [num, num_priors, feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1) #", "obj_t[mask].long(), reduction='none') * weight) # Confidence Loss (cosine distance to classes center) #", "more details. \"\"\" def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target):", "num_classes] # obj_data[batch_size, num_priors, 2] loc_data, conf_data, obj_data = predictions device = loc_data.device", "[batch_size,num_objs,5] (last idx is the label). 
\"\"\" # loc_data[batch_size, num_priors, 4] # conf_data[batch_size,", "match(self.threshold, truths, defaults, self.variance, labels, loc_t, conf_t, obj_t, idx) pos = (conf_t[:, :,", "1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit = torch.cat((logit_0, logit_k), 1) # Confidence Loss Including Positive", "from utils.box_utils import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function Compute Targets: 1)", "torch.Tensor(num, num_priors, 2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device) # match priors with gt for", "loc_data.size(0) num_priors = priors.size(0) # match priors (default boxes) and ground truth boxes", "< num_neg.expand_as(idx_rank) # [num, num_priors] # Object Loss Including Positive and Negative Examples", "(default boxes) and ground truth boxes loc_t = torch.Tensor(num, num_priors, 4).to(device) conf_t =", "num_priors, num_classes] # obj_data[batch_size, num_priors, 2] loc_data, conf_data, obj_data = predictions device =", "with gt for idx in range(num): # batch_size truths = targets[idx][:, :-2].data #", "g: ground truth boxes N: number of matched default boxes See: https://arxiv.org/pdf/1512.02325.pdf for", "bkg_label, neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes self.threshold = overlap_thresh", "bounding boxes. 
(default negative:positive ratio 3:1) Objective Loss: L(x,c,l,g) = (Lconf(x, c) +", "loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') * weight) N = num_pos.sum() loss_l /=", "max=num_priors - 1) neg = idx_rank < num_neg.expand_as(idx_rank) # [num, num_priors] # Object", "Mining loss_obj[obj_t.view(-1)] = 0 # filter out pos boxes (label>0) and ignored boxes", "= targets[idx][:, :-2].data # [obj_num, 4] labels = targets[idx][:, -2:].data # [obj_num] defaults", "classes center) # pos [num, num_priors] # conf_data [num, num_priors, feature_dim] batch_conf =", "weight) # Confidence Loss (cosine distance to classes center) # pos [num, num_priors]", "logit.view(num, -1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') * weight) N =", "= pos | neg weight = conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none')", "(logit-combined) batch_obj = obj_data.view(-1, 2) # [num*num_priors, 2] logit_0 = batch_obj[:, 0].unsqueeze(1) +", "num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes", "= logit.view(num, -1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') * weight) N", "+ batch_conf logit = torch.cat((logit_0, logit_k), 1) # Confidence Loss Including Positive and", "def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes", "SmoothL1 Loss weighted by α which is set to 1 by cross val.", "number of default bounding boxes. 
(default negative:positive ratio 3:1) Objective Loss: L(x,c,l,g) =", "loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none') # Hard Negative Mining loss_obj[obj_t.view(-1)] = 0", "loc_t = loc_t[pos] loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos = conf_t[pos][:, 1] loss_l", "Objective Loss: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N Where, Lconf is", "0].long(), reduction='none') * weight) N = num_pos.sum() loss_l /= N loss_c /= N", "prior boxes from SSD net. conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors shape:", "neg_pos self.neg_overlap = neg_overlap self.variance = [0.1, 0.2] def forward(self, predictions, priors, targets):", "net. conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground", "= neg_pos self.neg_overlap = neg_overlap self.variance = [0.1, 0.2] def forward(self, predictions, priors,", "and Lloc is the SmoothL1 Loss weighted by α which is set to", "forward(self, predictions, priors, targets): \"\"\"Multibox Loss Args: predictions (tuple): A tuple containing loc", "'priorboxes' that have jaccard index > threshold parameter (default threshold: 0.5). 2) Produce", "the label). 
\"\"\" # loc_data[batch_size, num_priors, 4] # conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size,", "* weight_pos) # Compute object loss across batch for hard negative mining with", "A tuple containing loc preds, conf preds, and prior boxes from SSD net.", "anno in targets] num = loc_data.size(0) num_priors = priors.size(0) # match priors (default", "(tensor): Ground truth boxes and labels for a batch, shape: [batch_size,num_objs,5] (last idx", "truths, defaults, self.variance, labels, loc_t, conf_t, obj_t, idx) pos = (conf_t[:, :, 0]", "= loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio * num_pos, max=num_priors - 1) neg = idx_rank", "their matched 'priorboxes'. 3) Hard negative mining to filter the excessive number of", "by cross val. Args: c: class confidences, l: predicted boxes, g: ground truth", "torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(), reduction='none') * weight) N = num_pos.sum() loss_l /= N loss_c", "self.use_prior_for_matching = prior_for_matching self.do_neg_mining = neg_mining self.negpos_ratio = neg_pos self.neg_overlap = neg_overlap self.variance", "4] # conf_data[batch_size, num_priors, num_classes] # obj_data[batch_size, num_priors, 2] loc_data, conf_data, obj_data =", "match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function Compute Targets: 1) Produce Confidence Target", "+ αLloc(x,l,g)) / N Where, Lconf is the CrossEntropy Loss and Lloc is", "(conf_t[:, :, 1] * pos.float()).sum(1, keepdim=True).long() # Localization Loss (Smooth L1) # Shape:", "torch.sum(torch.sum(loss_l, dim=1) * weight_pos) # Compute object loss across batch for hard negative", "num_priors, feature_dim] batch_conf = conf_data.view(-1, self.num_classes-1) # Compute max conf across batch for", "| neg weight = conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight)", "# batch_size truths = targets[idx][:, :-2].data # [obj_num, 4] labels = 
targets[idx][:, -2:].data", "jaccard index > threshold parameter (default threshold: 0.5). 2) Produce localization target by", "(last idx is the label). \"\"\" # loc_data[batch_size, num_priors, 4] # conf_data[batch_size, num_priors,", "labels, loc_t, conf_t, obj_t, idx) pos = (conf_t[:, :, 0] > 0).bool() #", "boxes (label=-1) for now loss_obj = loss_obj.view(num, -1) _, loss_idx = loss_obj.sort(1, descending=True)", "(label=-1) for now loss_obj = loss_obj.view(num, -1) _, loss_idx = loss_obj.sort(1, descending=True) _,", "idx) pos = (conf_t[:, :, 0] > 0).bool() # [num, num_priors] num_pos =", "as F from utils.box_utils import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function Compute", "shape: torch.size(batch_size,num_priors,4) priors shape: torch.size(num_priors,4) ground_truth (tensor): Ground truth boxes and labels for", "distance to classes center) # pos [num, num_priors] # conf_data [num, num_priors, feature_dim]", "batch_conf logit = torch.cat((logit_0, logit_k), 1) # Confidence Loss Including Positive and Negative", "= batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit = torch.cat((logit_0, logit_k), 1) # Confidence Loss", "ground truth boxes and their matched 'priorboxes'. 3) Hard negative mining to filter", "'priorboxes'. 
3) Hard negative mining to filter the excessive number of negative examples", "prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes self.threshold =", "truth boxes and labels for a batch, shape: [batch_size,num_objs,5] (last idx is the", "N = num_pos.sum() loss_l /= N loss_c /= N loss_obj /= N return", "2] logit_0 = batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf)", "weight_pos = conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l, dim=1) * weight_pos) # Compute object", "idx in range(num): # batch_size truths = targets[idx][:, :-2].data # [obj_num, 4] labels", "preds, and prior boxes from SSD net. conf shape: torch.size(batch_size,num_priors,num_classes) loc shape: torch.size(batch_size,num_priors,4)", "and labels for a batch, shape: [batch_size,num_objs,5] (last idx is the label). \"\"\"", "for idx in range(num): # batch_size truths = targets[idx][:, :-2].data # [obj_num, 4]", "of default bounding boxes. (default negative:positive ratio 3:1) Objective Loss: L(x,c,l,g) = (Lconf(x,", "boxes and their matched 'priorboxes'. 3) Hard negative mining to filter the excessive", "have jaccard index > threshold parameter (default threshold: 0.5). 2) Produce localization target", "truth boxes with (default) 'priorboxes' that have jaccard index > threshold parameter (default", "import torch.nn.functional as F from utils.box_utils import match class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss", "# match priors with gt for idx in range(num): # batch_size truths =", "Negative Mining loss_obj[obj_t.view(-1)] = 0 # filter out pos boxes (label>0) and ignored", "and Negative Examples logit = logit.view(num, -1, self.num_classes) loss_c = torch.sum(F.cross_entropy(logit[mask], conf_t[mask][:, 0].long(),", "1 by cross val. 
Args: c: class confidences, l: predicted boxes, g: ground", "logit_k), 1) # Confidence Loss Including Positive and Negative Examples logit = logit.view(num,", "which is set to 1 by cross val. Args: c: class confidences, l:", "boxes See: https://arxiv.org/pdf/1512.02325.pdf for more details. \"\"\" def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label,", "# [obj_num, 4] labels = targets[idx][:, -2:].data # [obj_num] defaults = priors.data #", "conf_t[mask][:, 0].long(), reduction='none') * weight) N = num_pos.sum() loss_l /= N loss_c /=", "= torch.BoolTensor(num, num_priors).to(device) # match priors with gt for idx in range(num): #", "2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device) # match priors with gt for idx in", "(default threshold: 0.5). 2) Produce localization target by 'encoding' variance into offsets of", "* weight) # Confidence Loss (cosine distance to classes center) # pos [num,", "targets] num = loc_data.size(0) num_priors = priors.size(0) # match priors (default boxes) and", "logit_0 = batch_obj[:, 0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) +", "0.2] def forward(self, predictions, priors, targets): \"\"\"Multibox Loss Args: predictions (tuple): A tuple", "containing loc preds, conf preds, and prior boxes from SSD net. 
conf shape:", "loc_data.device targets = [anno.to(device) for anno in targets] num = loc_data.size(0) num_priors =", "priors.size(0) # match priors (default boxes) and ground truth boxes loc_t = torch.Tensor(num,", "Positive and Negative Examples mask = pos | neg weight = conf_t[mask][:, 1]", "excessive number of negative examples that comes with using a large number of", "1) # Confidence Loss Including Positive and Negative Examples logit = logit.view(num, -1,", "loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight) # Confidence Loss (cosine distance to", "torch.clamp(self.negpos_ratio * num_pos, max=num_priors - 1) neg = idx_rank < num_neg.expand_as(idx_rank) # [num,", "Negative Examples mask = pos | neg weight = conf_t[mask][:, 1] loss_obj =", "loss_idx.sort(1) num_neg = torch.clamp(self.negpos_ratio * num_pos, max=num_priors - 1) neg = idx_rank <", "obj_t, idx) pos = (conf_t[:, :, 0] > 0).bool() # [num, num_priors] num_pos", "neg_overlap, encode_target): super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes self.threshold = overlap_thresh self.background_label = bkg_label", "1] * pos.float()).sum(1, keepdim=True).long() # Localization Loss (Smooth L1) # Shape: [batch,num_priors,4] loc_p", "overlap_thresh self.background_label = bkg_label self.encode_target = encode_target self.use_prior_for_matching = prior_for_matching self.do_neg_mining = neg_mining", "idx is the label). \"\"\" # loc_data[batch_size, num_priors, 4] # conf_data[batch_size, num_priors, num_classes]", "Target Indices by matching ground truth boxes with (default) 'priorboxes' that have jaccard", "/ N Where, Lconf is the CrossEntropy Loss and Lloc is the SmoothL1", "with using a large number of default bounding boxes. 
(default negative:positive ratio 3:1)", "to classes center) # pos [num, num_priors] # conf_data [num, num_priors, feature_dim] batch_conf", "# [num, num_priors] # Object Loss Including Positive and Negative Examples mask =", "class MultiBoxLoss_combined(nn.Module): \"\"\"SSD Weighted Loss Function Compute Targets: 1) Produce Confidence Target Indices", "= torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') * weight) # Confidence Loss (cosine distance to classes", "details. \"\"\" def __init__(self, num_classes, overlap_thresh, prior_for_matching, bkg_label, neg_mining, neg_pos, neg_overlap, encode_target): super(MultiBoxLoss_combined,", "using a large number of default bounding boxes. (default negative:positive ratio 3:1) Objective", "(tuple): A tuple containing loc preds, conf preds, and prior boxes from SSD", "pos | neg weight = conf_t[mask][:, 1] loss_obj = torch.sum(F.cross_entropy(obj_data[mask], obj_t[mask].long(), reduction='none') *", "super(MultiBoxLoss_combined, self).__init__() self.num_classes = num_classes self.threshold = overlap_thresh self.background_label = bkg_label self.encode_target =", "loss_l /= N loss_c /= N loss_obj /= N return {'loss_box_reg': loss_l, 'loss_cls':", "loc_t, conf_t, obj_t, idx) pos = (conf_t[:, :, 0] > 0).bool() # [num,", "= torch.clamp(self.negpos_ratio * num_pos, max=num_priors - 1) neg = idx_rank < num_neg.expand_as(idx_rank) #", "by α which is set to 1 by cross val. 
Args: c: class", "= overlap_thresh self.background_label = bkg_label self.encode_target = encode_target self.use_prior_for_matching = prior_for_matching self.do_neg_mining =", "self.num_classes-1) # Compute max conf across batch for hard negative mining (logit-combined) batch_obj", "(label>0) and ignored boxes (label=-1) for now loss_obj = loss_obj.view(num, -1) _, loss_idx", "to filter the excessive number of negative examples that comes with using a", "loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='none') weight_pos = conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l, dim=1)", "= conf_t[pos][:, 1] loss_l = torch.sum(torch.sum(loss_l, dim=1) * weight_pos) # Compute object loss", "center) # pos [num, num_priors] # conf_data [num, num_priors, feature_dim] batch_conf = conf_data.view(-1,", "in range(num): # batch_size truths = targets[idx][:, :-2].data # [obj_num, 4] labels =", "> 0).bool() # [num, num_priors] num_pos = (conf_t[:, :, 1] * pos.float()).sum(1, keepdim=True).long()", "of ground truth boxes and their matched 'priorboxes'. 3) Hard negative mining to", "(conf_t[:, :, 0] > 0).bool() # [num, num_priors] num_pos = (conf_t[:, :, 1]", "+ torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit = torch.cat((logit_0,", "localization target by 'encoding' variance into offsets of ground truth boxes and their", "0].unsqueeze(1) + torch.log( torch.exp(batch_conf).sum(dim=1, keepdim=True)) logit_k = batch_obj[:, 1].unsqueeze(1).expand_as(batch_conf) + batch_conf logit =", "large number of default bounding boxes. 
(default negative:positive ratio 3:1) Objective Loss: L(x,c,l,g)", "\"\"\"Multibox Loss Args: predictions (tuple): A tuple containing loc preds, conf preds, and", "conf_t = torch.Tensor(num, num_priors, 2).to(device) obj_t = torch.BoolTensor(num, num_priors).to(device) # match priors with", "boxes loc_t = torch.Tensor(num, num_priors, 4).to(device) conf_t = torch.Tensor(num, num_priors, 2).to(device) obj_t =", "Compute object loss across batch for hard negative mining with torch.no_grad(): loss_obj =", "batch for hard negative mining with torch.no_grad(): loss_obj = F.cross_entropy(obj_data.view(-1, 2), obj_t.long().view(-1), reduction='none')", "priors with gt for idx in range(num): # batch_size truths = targets[idx][:, :-2].data", "logit = torch.cat((logit_0, logit_k), 1) # Confidence Loss Including Positive and Negative Examples" ]
[ "with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required == 20 def test_log_group(): class LogGroup(argclass.Group): level:", "\"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2, 3, 4, 5]) def", "= Parser() parser.parse_args([]) finally: del os.environ['NARGS'] assert parser.nargs == frozenset({1, 2, 3}) def", "= self.Parser() r = repr(parser) assert r == \"<Parser: 1 arguments, 2 groups,", "test_simple(self): parser = self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers assert parser.integers == [1,", "parser.spam == [4] def test_group_aliases(): class Group(argclass.Group): foo: str = argclass.Argument(\"-F\") class Parser(argclass.Parser):", "os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser() parser.parse_args([]) assert parser.foo == expected", "= Parser() parser.parse_args([]) assert parser.log.level == logging.INFO assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"])", "fp.write(\"nargs = [1, 2, 3, 4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs ==", "and port\", prefix=\"api\", defaults={ \"port\": 80, \"host\": \"0.0.0.0\", }, ) grpc: HostPortGroup =", ") def test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"])", "parser = self.Parser() r = repr(parser) assert r == \"<Parser: 1 arguments, 2", "parser.foo == \"spam\" assert parser.bar == 1 def test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): foo:", "assert \"--help\" in captured.out assert \"--foo FOO\" in captured.out assert \"[--bar BAR]\" in", "variant in (\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is False def", 
"= Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required == 10 parser = Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([])", "assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset)", "\"[1, 2, 3]\" try: parser = Parser() parser.parse_args([]) finally: del os.environ['NARGS'] assert parser.nargs", "class Parser(argclass.Parser): integers: List[int] = argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for", "test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file", "assert \"--foo\" in captured.out assert \"--bar\" in captured.out assert \"--help\" in captured.out assert", "\"json\" def test_environment_required(): class Parser(argclass.Parser): required: int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\"", "parser.foo == [1, 2] assert parser.bar == [3] assert parser.spam == [4] def", "= tmp_path / \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1,", "pytest.FixtureRequest): env_var = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str =", "== \"<Parser: 1 arguments, 2 groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser = self.Parser()", "\"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo == \"bar\" assert parser.http.host == \"127.0.0.1\"", "== frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int,", "(\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) 
assert parser.optional is False def test_argument_defaults(): class", "int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert parser.required == 100 os.environ.pop('TEST_REQUIRED')", "fp.write(\"nargs = {1, 2, 3, 4}\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs ==", "Parser(argclass.Parser): debug: bool = False confused_default: bool = True pool_size: int = 4", "== \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level == logging.DEBUG assert parser.log.format == \"json\" def", "fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2, 3, 4}\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs", "uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var] = expected", "int = 2 parser = Parser() parser.parse_args([]) assert parser.debug is False assert parser.confused_default", "= Parser() parser.print_help() captured = capsys.readouterr() assert \"--foo\" in captured.out assert \"--bar\" in", "nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1, 2,", "= Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set", "Parser() parser.parse_args([]) assert parser.debug is False assert parser.confused_default is True assert parser.pool_size ==", "4, 5]) def test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=1, converter=frozenset", "int = argclass.Argument(required=True) config_path = tmp_path / \"config.ini\" with open(config_path, \"w\") as fp:", "= argclass.Argument( type=int, nargs=1, converter=frozenset ) parser = Parser() parser.parse_args([]) assert 
isinstance(parser.args_set, frozenset)", "assert parser.foo == [1, 2] assert parser.bar == [3] assert parser.spam == [4]", "parser.parse_args([]) assert parser.log.level == logging.ERROR assert parser.log.format == \"json\" def test_environment_required(): class Parser(argclass.Parser):", "config_path = tmp_path / \"config.ini\" with open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required =", "test_inheritance(): class AddressPort(argclass.Group): address: str port: int class Parser(argclass.Parser, AddressPort): pass parser =", "env_var=\"NARGS\" ) conf_file = tmp_path / \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\")", "assert parser.optional is True for variant in (\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"])", "log = LogGroup() parser = Parser() parser.parse_args([]) assert parser.log.level == logging.INFO assert parser.log.format", "\"host\": \"::\"}, ) def test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo ==", "= argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1, 2, 3]\" try:", "assert parser.group.foo == \"egg\" def test_short_parser_definition(): class Parser(argclass.Parser): foo: str bar: int parser", "uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo ==", "expected = uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser() parser.parse_args([]) assert", "test_argument_defaults(): class Parser(argclass.Parser): debug: bool = False confused_default: bool = True pool_size: int", "assert parser.http.host == \"0.0.0.0\" assert parser.http.port == 80 assert parser.grpc.host == \"::\" assert", 
"\"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is False def test_argument_defaults(): class Parser(argclass.Parser): debug: bool =", "parser = Parser() parser.parse_args([]) finally: del os.environ['NARGS'] assert parser.nargs == frozenset({1, 2, 3})", "def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset ) parser", "for variant in (\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is False", "type=int, nargs=\"+\", converter=frozenset ) parser = Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"])", "frozenset([1, 2, 3, 4, 5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument(", "\"0.0.0.0\" assert parser.port == 9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int = argclass.Argument(required=True)", "\"3\"]) assert parser.integers assert parser.integers == [1, 2, 3] class HostPortGroup(argclass.Group): host: str", "\"egg\" def test_short_parser_definition(): class Parser(argclass.Parser): foo: str bar: int parser = Parser() parser.parse_args([\"--foo=spam\",", "nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file = tmp_path / \"config.ini\" with open(conf_file, \"w\") as", "== 9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int = argclass.Argument(required=True) config_path = tmp_path", "test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int = argclass.Argument(required=True) config_path = tmp_path / \"config.ini\" with", "bool = False confused_default: bool = True pool_size: int = 4 forks: int", "\"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2, 3, 4, 
5])", "= Parser() parser.parse_args([]) assert parser.foo == expected def test_nargs(): class Parser(argclass.Parser): foo: List[int]", "assert parser.optional is None assert not parser.flag parser.parse_args([\"--flag\"]) assert parser.flag for variant in", "]) assert parser.foo == \"bar\" assert parser.http.host == \"127.0.0.1\" assert parser.http.port == 8080", "parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING def test_optional_type():", "test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" assert parser.http.host == \"0.0.0.0\"", "def test_group_aliases(): class Group(argclass.Group): foo: str = argclass.Argument(\"-F\") class Parser(argclass.Parser): group = Group()", "is False def test_argument_defaults(): class Parser(argclass.Parser): debug: bool = False confused_default: bool =", "assert parser.nargs == frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] =", "foo: str bar: int = 0 parser = Parser() parser.print_help() captured = capsys.readouterr()", "\"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True for variant in (\"no\",", "BAR]\" in captured.out def test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): log_level: int = argclass.LogLevel parser", "TestBasics: class Parser(argclass.Parser): integers: List[int] = argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer", "Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset ) parser = Parser() parser.parse_args([])", "[1, 2] assert parser.bar == [3] assert parser.spam == [4] def test_group_aliases(): class", "parser = Parser() with 
pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required == 20 def test_log_group():", "tmp_path / \"config.ini\" with open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\")", "False assert parser.pool_size == 2 assert parser.forks == 8 def test_inheritance(): class AddressPort(argclass.Group):", "assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1])", "Parser(argclass.Parser, AddressPort): pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address == \"0.0.0.0\" assert", "parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required == 20 def test_log_group(): class LogGroup(argclass.Group): level: int =", "= Group() parser = Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo == \"egg\" def test_short_parser_definition():", "isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int] =", "= Parser() parser.parse_args([]) assert parser.log.level == logging.ERROR assert parser.log.format == \"json\" def test_environment_required():", "parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert parser.required == 100 os.environ.pop('TEST_REQUIRED') with", "Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=1, converter=frozenset ) parser = Parser() parser.parse_args([])", "== \"0.0.0.0\" assert parser.port == 9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int =", "test_nargs(): class Parser(argclass.Parser): foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int =", "log = 
LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser() parser.parse_args([]) assert parser.log.level == logging.ERROR assert", "isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_and_converter_not_required(): class", "assert parser.log.level == logging.ERROR assert parser.log.format == \"json\" def test_environment_required(): class Parser(argclass.Parser): required:", "the integers (default: find the max)\", ) def test_simple(self): parser = self.Parser() parser.parse_args([\"1\",", "parser = Parser() parser.print_help() captured = capsys.readouterr() assert \"--foo\" in captured.out assert \"--bar\"", "int class Parser(argclass.Parser, AddressPort): pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address ==", "os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int,", "3, 4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3, 4})", "2, 3, 4, 5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int,", "test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS']", "class Parser(argclass.Parser): debug: bool = False confused_default: bool = True pool_size: int =", "Parser(argclass.Parser): integers: List[int] = argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for the", "= self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\",", 
"test_short_parser_definition(): class Parser(argclass.Parser): foo: str bar: int parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert", "pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\", converter=frozenset", "2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert parser.debug is True assert parser.confused_default", "\"127.0.0.2\" assert parser.grpc.port == 9000 def test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo", "choices=(\"json\", \"stream\"), default=\"stream\" ) class Parser(argclass.Parser): log = LogGroup() parser = Parser() parser.parse_args([])", "10 parser = Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int]", "parser.required == 20 def test_log_group(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format =", "== [3] assert parser.spam == [4] def test_group_aliases(): class Group(argclass.Group): foo: str =", "class TestFoo: class Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup( title=\"HTTP", "env_var = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var)", "\"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert parser.debug is True assert parser.confused_default is False", "class Parser(argclass.Parser): group = Group() parser = Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo ==", "argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser() parser.parse_args([])", 
"True assert parser.confused_default is False assert parser.pool_size == 2 assert parser.forks == 8", "Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set ==", "parser.required == 10 parser = Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser):", "type=int, nargs=1, converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set", "parser.parse_args([]) assert parser.required == 10 parser = Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path):", "2] assert parser.bar == [3] assert parser.spam == [4] def test_group_aliases(): class Group(argclass.Group):", "= re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var) expected", "Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo == [1, 2] assert parser.bar", "== frozenset([1, 2, 3, 4, 5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int] =", "r == \"<Parser: 1 arguments, 2 groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser =", "test_group_aliases(): class Group(argclass.Group): foo: str = argclass.Argument(\"-F\") class Parser(argclass.Parser): group = Group() parser", "with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\",", "\"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2, 3,", ") accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the 
integers (default: find", "help=\"sum the integers (default: find the max)\", ) def test_simple(self): parser = self.Parser()", "parser.forks == 2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert parser.debug is True", "argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup( title=\"HTTP host and port\", prefix=\"api\", defaults={ \"port\": 80,", "Group() parser = Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo == \"egg\" def test_short_parser_definition(): class", "AddressPort): pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address == \"0.0.0.0\" assert parser.port", "parser.grpc.port == 9000 def test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\"", "test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): log_level: int = argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\", \"info\"])", "type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file = tmp_path / \"config.ini\" with open(conf_file, \"w\")", "class Group(argclass.Group): foo: str = argclass.Argument(\"-F\") class Parser(argclass.Parser): group = Group() parser =", "LogGroup(argclass.Group): level: int = argclass.LogLevel format = argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\" ) class", "\"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level == logging.DEBUG assert parser.log.format == \"json\" def test_log_group_defaults():", "str = argclass.Argument( choices=(\"json\", \"stream\") ) class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser", "0 parser = Parser() parser.print_help() captured = capsys.readouterr() assert \"--foo\" in captured.out assert", "parser.foo == \"bar\" assert parser.http.host == \"127.0.0.1\" assert 
parser.http.port == 8080 assert parser.grpc.host", "parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\",", "Parser(argclass.Parser): optional: Optional[int] parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=10\"]) assert", "\"--spam=4\"]) assert parser.foo == [1, 2] assert parser.bar == [3] assert parser.spam ==", "fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2, 3, 4}\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert", "\"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" def test_group(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"])", "= expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo == expected def", "LogGroup() parser = Parser() parser.parse_args([]) assert parser.log.level == logging.INFO assert parser.log.format == \"stream\"", "== frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset,", "\"--foo\" in captured.out assert \"--bar\" in captured.out assert \"--help\" in captured.out assert \"--foo", "= Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3, 4}) def test_nargs_config_set(tmp_path): class", "0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser = self.Parser() with pytest.raises(AttributeError): _ = parser.foo def", "2, 3]\" try: parser = Parser() parser.parse_args([]) finally: del os.environ['NARGS'] assert parser.nargs ==", "metavar=\"N\", help=\"an integer for the accumulator\", ) accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum,", "== \"egg\" def test_short_parser_definition(): class 
Parser(argclass.Parser): foo: str bar: int parser = Parser()", "re import uuid from typing import List, Optional, FrozenSet import pytest import argclass", "host and port\", prefix=\"api\", defaults={ \"port\": 80, \"host\": \"0.0.0.0\", }, ) grpc: HostPortGroup", "None parser.parse_args([\"--optional=10\"]) assert parser.optional == 10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int] =", "== logging.DEBUG assert parser.log.format == \"json\" def test_log_group_defaults(): class LogGroup(argclass.Group): level: int =", "== expected def test_env_var(request: pytest.FixtureRequest): env_var = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class", "== 80 assert parser.grpc.host == \"::\" assert parser.grpc.port == 6000 def test_parser_repr(self): parser", "parser = Parser() parser.parse_args([]) assert parser.debug is False assert parser.confused_default is True assert", "Optional[bool] parser = Parser() parser.parse_args([]) assert parser.optional is None assert not parser.flag parser.parse_args([\"--flag\"])", "\"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo == \"bar\" assert parser.http.host == \"127.0.0.1\" assert", "class Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False) parser = Parser() parser.parse_args([]) assert parser.optional is", "int = argclass.LogLevel format: str = argclass.Argument( choices=(\"json\", \"stream\") ) class Parser(argclass.Parser): log", "= 0 parser = Parser() parser.print_help() captured = capsys.readouterr() assert \"--foo\" in captured.out", "int = 4 forks: int = 2 parser = Parser() parser.parse_args([]) assert parser.debug", "= Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address == \"0.0.0.0\" assert parser.port == 9876 def", "class Parser(argclass.Parser): foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int 
= argclass.Argument(nargs=\"*\")", "class Parser(argclass.Parser): required: int parser = Parser() with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required", "+ uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var] =", "\"--foo FOO\" in captured.out assert \"[--bar BAR]\" in captured.out def test_print_log_level(capsys: pytest.CaptureFixture): class", "in captured.out def test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): log_level: int = argclass.LogLevel parser =", "is False assert parser.pool_size == 2 assert parser.forks == 8 def test_inheritance(): class", "\"bar\" assert parser.http.host == \"127.0.0.1\" assert parser.http.port == 8080 assert parser.grpc.host == \"127.0.0.2\"", "parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=10\"]) assert parser.optional == 10 def test_optional_is_not_required(tmp_path): class", "4, 5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset", "HostPortGroup( title=\"GRPC host and port\", defaults={\"port\": 6000, \"host\": \"::\"}, ) def test_simple(self): parser", "arguments, 2 groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser = self.Parser() with pytest.raises(AttributeError): _", "parser.flag for variant in (\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional", "Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set,", "\"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2, 3, 4]\\n\") parser = Parser(config_files=[conf_file])", 
"AddressPort(argclass.Group): address: str port: int class Parser(argclass.Parser, AddressPort): pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\",", "\"stream\") ) class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser() parser.parse_args([]) assert", "import logging import os import re import uuid from typing import List, Optional,", "\"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2, 3, 4}\\n\")", "= repr(parser) assert r == \"<Parser: 1 arguments, 2 groups, 0 subparsers>\" def", "assert parser.http.port == 8080 assert parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port == 9000 def", "assert parser.grpc.port == 6000 def test_parser_repr(self): parser = self.Parser() r = repr(parser) assert", "frozenset) assert parser.args_set == frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument(", "class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] =", "import uuid from typing import List, Optional, FrozenSet import pytest import argclass class", "== frozenset([1, 2, 3, 4, 5]) def test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int] =", "in captured.out assert \"--help\" in captured.out assert \"--foo FOO\" in captured.out assert \"[--bar", "frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set,", "\"info\"]) assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING def test_optional_type(): class", "[1, 2, 3] class HostPortGroup(argclass.Group): host: str port: int class TestFoo: class Parser(argclass.Parser):", "[1, 2, 3, 4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) 
assert parser.nargs == frozenset({1, 2,", "def test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=1, converter=frozenset ) parser", "assert parser.required == 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser): args_set:", "self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo == expected def test_env_var(request: pytest.FixtureRequest): env_var = re.sub(r\"\\d+\", \"\",", "parser.http.host == \"0.0.0.0\" assert parser.http.port == 80 assert parser.grpc.host == \"::\" assert parser.grpc.port", "\"0.0.0.0\", }, ) grpc: HostPortGroup = HostPortGroup( title=\"GRPC host and port\", defaults={\"port\": 6000,", "assert parser.http.port == 80 assert parser.grpc.host == \"::\" assert parser.grpc.port == 6000 def", "True for variant in (\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is", "20 def test_minimal_required(tmp_path): class Parser(argclass.Parser): required: int parser = Parser() with pytest.raises(SystemExit): parser.parse_args([])", "with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2, 3, 4]\\n\") parser", "List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int = argclass.Argument(nargs=\"*\") spam: int =", "test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False) parser = Parser() parser.parse_args([]) assert parser.optional", "parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\", converter=frozenset )", "address: str port: int class Parser(argclass.Parser, AddressPort): pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\", 
\"--port=9876\"])", "assert parser.optional is None parser.parse_args([\"--optional=20\"]) assert parser.optional == 20 def test_minimal_required(tmp_path): class Parser(argclass.Parser):", "parser.parse_args([\"--required=20\"]) assert parser.required == 20 def test_log_group(): class LogGroup(argclass.Group): level: int = argclass.LogLevel", "Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING def", "frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\"", "is None parser.parse_args([\"--optional=10\"]) assert parser.optional == 10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int]", "parser.foo def test_environment(self, request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected", "parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING def test_optional_type(): class Parser(argclass.Parser): flag:", "== 6000 def test_parser_repr(self): parser = self.Parser() r = repr(parser) assert r ==", "pytest.CaptureFixture): class Parser(argclass.Parser): log_level: int = argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\", \"info\"]) assert", "parser.optional == 10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False) parser =", "argclass.Argument(required=True) config_path = tmp_path / \"config.ini\" with open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required", "env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1, 2, 3]\" try: parser = Parser() parser.parse_args([]) finally:", "parser.foo == \"bar\" parser.parse_args([ 
\"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo ==", "\"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([]) assert", "5]) def test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=1, converter=frozenset )", "class Parser(argclass.Parser): log = LogGroup() parser = Parser() parser.parse_args([]) assert parser.log.level == logging.INFO", "\"2\", \"3\"]) assert parser.integers assert parser.integers == [1, 2, 3] class HostPortGroup(argclass.Group): host:", "= Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=20\"]) assert parser.optional == 20 def", "parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ])", "HostPortGroup = HostPortGroup( title=\"HTTP host and port\", prefix=\"api\", defaults={ \"port\": 80, \"host\": \"0.0.0.0\",", "finally: del os.environ['NARGS'] assert parser.nargs == frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser):", "4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\"", "\"1\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser): nargs:", "== logging.WARNING def test_optional_type(): class Parser(argclass.Parser): flag: bool optional: Optional[bool] parser = Parser()", "3, 4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset,", "FrozenSet import pytest import 
argclass class TestBasics: class Parser(argclass.Parser): integers: List[int] = argclass.Argument(", "bar: int = 0 parser = Parser() parser.print_help() captured = capsys.readouterr() assert \"--foo\"", "parser.optional is None parser.parse_args([\"--optional=20\"]) assert parser.optional == 20 def test_minimal_required(tmp_path): class Parser(argclass.Parser): required:", "grpc: HostPortGroup = HostPortGroup( title=\"GRPC host and port\", defaults={\"port\": 6000, \"host\": \"::\"}, )", "== \"127.0.0.2\" assert parser.grpc.port == 9000 def test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert", "choices=(\"json\", \"stream\") ) class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser() parser.parse_args([])", "assert parser.args_set == frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int,", "5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset )", "parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level == logging.DEBUG assert parser.log.format == \"json\"", "optional: Optional[bool] parser = Parser() parser.parse_args([]) assert parser.optional is None assert not parser.flag", "= LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser() parser.parse_args([]) assert parser.log.level == logging.ERROR assert parser.log.format", "parser.args_set == frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\",", "self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" def", "\"bar\" def test_group(self): parser = self.Parser() 
parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([", "parser.log.level == logging.ERROR assert parser.log.format == \"json\" def test_environment_required(): class Parser(argclass.Parser): required: int", "isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert", "assert parser.confused_default is False assert parser.pool_size == 2 assert parser.forks == 8 def", "80 assert parser.grpc.host == \"::\" assert parser.grpc.port == 6000 def test_parser_repr(self): parser =", "assert parser.bar == [3] assert parser.spam == [4] def test_group_aliases(): class Group(argclass.Group): foo:", "argclass.LogLevel format: str = argclass.Argument( choices=(\"json\", \"stream\") ) class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\",", "assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level == logging.DEBUG assert parser.log.format ==", "test_environment(self, request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected = uuid.uuid4().hex", "== logging.INFO assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level == logging.DEBUG assert", "parser.http.host == \"127.0.0.1\" assert parser.http.port == 8080 assert parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port", "3, 4, 5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\",", "in captured.out assert \"--bar\" in captured.out assert \"--help\" in captured.out assert \"--foo FOO\"", "integer for the accumulator\", ) accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum", "args_set: FrozenSet[int] = 
argclass.Argument( type=int, nargs=1, converter=frozenset ) parser = Parser() parser.parse_args([]) assert", "test_env_var(request: pytest.FixtureRequest): env_var = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str", "log_level: int = argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level == logging.INFO", "parser.flag parser.parse_args([\"--flag\"]) assert parser.flag for variant in (\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"):", "action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the integers (default: find the max)\", ) def test_simple(self):", "re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var) expected =", "expected def test_nargs(): class Parser(argclass.Parser): foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar:", "= argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser()", "with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int] parser = Parser() parser.parse_args([])", "== 9000 def test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" assert", "parser = Parser() parser.parse_args([]) assert parser.log.level == logging.INFO assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\",", "nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1, 2, 3]\" try: parser = Parser()", "parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers assert parser.integers == [1, 2, 3] class HostPortGroup(argclass.Group):", "is None assert not parser.flag parser.parse_args([\"--flag\"]) assert 
parser.flag for variant in (\"yes\", \"Y\",", "parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo == \"spam\" assert parser.bar == 1", "Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file = tmp_path", "parser.log_level == logging.WARNING def test_optional_type(): class Parser(argclass.Parser): flag: bool optional: Optional[bool] parser =", "bar: int = argclass.Argument(nargs=\"*\") spam: int = argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\", \"1\",", "logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING def test_optional_type(): class Parser(argclass.Parser): flag: bool optional:", "= argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo ==", "Parser(argclass.Parser): flag: bool optional: Optional[bool] parser = Parser() parser.parse_args([]) assert parser.optional is None", "class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser() parser.parse_args([]) assert parser.log.level ==", "int parser = Parser() with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required == 20 def", "isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_1(): class", "== frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1]) def test_nargs_env_var():", "test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset ) parser =", "{1, 2, 3, 4}\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2,", "assert 
parser.log.format == \"json\" def test_log_group_defaults(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format:", "parser.log.level == logging.DEBUG assert parser.log.format == \"json\" def test_log_group_defaults(): class LogGroup(argclass.Group): level: int", "parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo ==", "== \"0.0.0.0\" assert parser.http.port == 80 assert parser.grpc.host == \"::\" assert parser.grpc.port ==", "os.environ['NARGS'] = \"[1, 2, 3]\" try: parser = Parser() parser.parse_args([]) finally: del os.environ['NARGS']", "parser.foo == expected def test_nargs(): class Parser(argclass.Parser): foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int,", "class Parser(argclass.Parser): log_level: int = argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level", "parser.nargs == frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument(", "\"127.0.0.1\" assert parser.http.port == 8080 assert parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port == 9000", "assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"])", "parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser):", "10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False) parser = Parser() parser.parse_args([])", "List[int] = argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for the accumulator\", )", "def test_argument_defaults(): class Parser(argclass.Parser): debug: 
bool = False confused_default: bool = True pool_size:", "optional: Optional[int] parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=10\"]) assert parser.optional", "Parser() with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required == 20 def test_log_group(): class LogGroup(argclass.Group):", "FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\", converter=frozenset ) parser = Parser() parser.parse_args([\"--args-set\", \"1\", \"2\",", "parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\",", "parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1]) def", "parser.parse_args([]) assert parser.optional is None assert not parser.flag parser.parse_args([\"--flag\"]) assert parser.flag for variant", "def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int] parser = Parser() parser.parse_args([]) assert parser.optional is", "\"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2, 3, 4,", "def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" )", "parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=10\"]) assert parser.optional == 10", "\"--log-format=json\"]) assert parser.log.level == logging.DEBUG assert parser.log.format == \"json\" def test_log_group_defaults(): class LogGroup(argclass.Group):", "pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required == 20 def test_log_group(): class LogGroup(argclass.Group): level: int", "assert 
parser.integers == [1, 2, 3] class HostPortGroup(argclass.Group): host: str port: int class", "parser.bar == 1 def test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): foo: str bar: int =", "default=max, help=\"sum the integers (default: find the max)\", ) def test_simple(self): parser =", "def test_optional_type(): class Parser(argclass.Parser): flag: bool optional: Optional[bool] parser = Parser() parser.parse_args([]) assert", "= HostPortGroup( title=\"GRPC host and port\", defaults={\"port\": 6000, \"host\": \"::\"}, ) def test_simple(self):", "parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" assert parser.http.host == \"0.0.0.0\" assert", "/ \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2, 3,", "frozenset) assert parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_1(): class Parser(argclass.Parser):", "Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\",", "== \"127.0.0.1\" assert parser.http.port == 8080 assert parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port ==", "port\", prefix=\"api\", defaults={ \"port\": 80, \"host\": \"0.0.0.0\", }, ) grpc: HostPortGroup = HostPortGroup(", "4 assert parser.forks == 2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert parser.debug", "= Parser() with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required == 20 def test_log_group(): class", "assert parser.spam == [4] def test_group_aliases(): class Group(argclass.Group): foo: str = argclass.Argument(\"-F\") class", "tmp_path / \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2,", "test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int] 
parser = Parser() parser.parse_args([]) assert parser.optional is None", "argclass.Argument(required=False) parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=20\"]) assert parser.optional ==", "= Parser() parser.parse_args([]) assert parser.optional is None assert not parser.flag parser.parse_args([\"--flag\"]) assert parser.flag", "2, 3, 4}\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3,", "port: int class Parser(argclass.Parser, AddressPort): pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address", "def test_log_group(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format = argclass.Argument( choices=(\"json\", \"stream\"),", "parser.required == 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser): args_set: FrozenSet[int]", "def test_nargs(): class Parser(argclass.Parser): foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int", "foo: str = argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser", "= argclass.Argument(\"-F\") class Parser(argclass.Parser): group = Group() parser = Parser() parser.parse_args([\"-F\", \"egg\"]) assert", ") conf_file = tmp_path / \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs", "TestFoo: class Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup( title=\"HTTP host", "argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int = argclass.Argument(nargs=\"*\") spam: int = argclass.Argument(nargs=1) parser", "class LogGroup(argclass.Group): level: int = argclass.LogLevel format = argclass.Argument( 
choices=(\"json\", \"stream\"), default=\"stream\" )", "def test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int = argclass.Argument(required=True) config_path = tmp_path / \"config.ini\"", "uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser() parser.parse_args([]) assert parser.foo ==", "self.Parser() with pytest.raises(AttributeError): _ = parser.foo def test_environment(self, request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\",", "= 2 parser = Parser() parser.parse_args([]) assert parser.debug is False assert parser.confused_default is", "= argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for the accumulator\", ) accumulate", "assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1]) def test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int]", "4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3, 4}) def", "Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3, 4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser):", "assert parser.foo == \"bar\" assert parser.http.host == \"127.0.0.1\" assert parser.http.port == 8080 assert", "argclass.LogLevel format = argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\" ) class Parser(argclass.Parser): log = LogGroup()", "class Parser(argclass.Parser): required: int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert parser.required", "forks: int = 2 parser = Parser() parser.parse_args([]) assert parser.debug is False assert", "port: int class TestFoo: class Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\") http: HostPortGroup =", "== 20 def test_minimal_required(tmp_path): class 
Parser(argclass.Parser): required: int parser = Parser() with pytest.raises(SystemExit):", "def test_nargs_and_converter(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\", converter=frozenset ) parser", "Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1,", "as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2, 3, 4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([])", "def test_parser_repr(self): parser = self.Parser() r = repr(parser) assert r == \"<Parser: 1", "parser.foo == \"bar\" def test_group(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo ==", "defaults={\"port\": 6000, \"host\": \"::\"}, ) def test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert", "assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_1():", "with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2, 3, 4}\\n\") parser", "is False assert parser.confused_default is True assert parser.pool_size == 4 assert parser.forks ==", "\"--port=9876\"]) assert parser.address == \"0.0.0.0\" assert parser.port == 9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser):", "assert parser.grpc.host == \"::\" assert parser.grpc.port == 6000 def test_parser_repr(self): parser = self.Parser()", "Parser(argclass.Parser): log_level: int = argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level ==", "test_log_group(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format = argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\"", "host and port\", defaults={\"port\": 6000, \"host\": \"::\"}, ) def test_simple(self): parser = self.Parser()", "assert parser.args_set == 
frozenset([1, 2, 3, 4, 5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set:", "3]\" try: parser = Parser() parser.parse_args([]) finally: del os.environ['NARGS'] assert parser.nargs == frozenset({1,", "parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo == \"egg\" def test_short_parser_definition(): class Parser(argclass.Parser): foo: str bar:", "assert parser.grpc.port == 9000 def test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo ==", "type=int, nargs=\"*\", converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set", "pytest.CaptureFixture): class Parser(argclass.Parser): foo: str bar: int = 0 parser = Parser() parser.print_help()", "assert not parser.flag parser.parse_args([\"--flag\"]) assert parser.flag for variant in (\"yes\", \"Y\", \"yeS\", \"enable\",", "required: int parser = Parser() with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required == 20", "assert parser.foo == \"bar\" def test_group(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo", "\"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2, 3, 4]\\n\")", "frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1]) def test_nargs_env_var(): class", "def test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): log_level: int = argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\",", "str bar: int parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo == \"spam\" assert", "parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3, 4}) def test_nargs_config_set(tmp_path):", "parser.foo == 
\"bar\" assert parser.http.host == \"0.0.0.0\" assert parser.http.port == 80 assert parser.grpc.host", "Parser(argclass.Parser): required: int = argclass.Argument(required=True) config_path = tmp_path / \"config.ini\" with open(config_path, \"w\")", "/ \"config.ini\" with open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser", "uuid from typing import List, Optional, FrozenSet import pytest import argclass class TestBasics:", "1 arguments, 2 groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser = self.Parser() with pytest.raises(AttributeError):", "pytest.raises(AttributeError): _ = parser.foo def test_environment(self, request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex", "assert parser.foo == expected def test_env_var(request: pytest.FixtureRequest): env_var = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex +", "parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset)", "Parser(argclass.Parser): required: int parser = Parser() with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert parser.required ==", "80, \"host\": \"0.0.0.0\", }, ) grpc: HostPortGroup = HostPortGroup( title=\"GRPC host and port\",", "parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" def test_group(self):", "def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" )", "= argclass.LogLevel format: str = argclass.Argument( choices=(\"json\", \"stream\") ) class Parser(argclass.Parser): log =", "open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2, 3, 4}\\n\") parser =", "Parser() 
parser.parse_args([]) assert parser.foo == expected def test_nargs(): class Parser(argclass.Parser): foo: List[int] =", "= Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=10\"]) assert parser.optional == 10 def", "= Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert parser.required == 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit):", "Group(argclass.Group): foo: str = argclass.Argument(\"-F\") class Parser(argclass.Parser): group = Group() parser = Parser()", "= 10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required == 10 parser =", "\"--grpc-port=9000\", ]) assert parser.foo == \"bar\" assert parser.http.host == \"127.0.0.1\" assert parser.http.port ==", "assert parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port == 9000 def test_group_defaults(self): parser = self.Parser()", "fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required == 10 parser = Parser(config_files=[]) with", "\"json\" def test_log_group_defaults(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format: str = argclass.Argument(", "parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1,", "parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int] parser = Parser() parser.parse_args([]) assert parser.optional", "= {1, 2, 3, 4}\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1,", "bar: int parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo == \"spam\" assert parser.bar", "parser.port == 9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int = 
argclass.Argument(required=True) config_path =", "= argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int = argclass.Argument(nargs=\"*\") spam: int = argclass.Argument(nargs=1)", "int = argclass.LogLevel format = argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\" ) class Parser(argclass.Parser): log", "\"--bar=3\", \"--spam=4\"]) assert parser.foo == [1, 2] assert parser.bar == [3] assert parser.spam", ") parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\",", "[4] def test_group_aliases(): class Group(argclass.Group): foo: str = argclass.Argument(\"-F\") class Parser(argclass.Parser): group =", "Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser() parser.parse_args([]) assert parser.log.level == logging.ERROR", "integers: List[int] = argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for the accumulator\",", "argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1, 2, 3]\" try: parser", "parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\",", "logging.INFO assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level == logging.DEBUG assert parser.log.format", "Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert parser.required == 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([])", "class Parser(argclass.Parser, AddressPort): pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address == \"0.0.0.0\"", "False confused_default: bool = 
True pool_size: int = 4 forks: int = 2", "logging.DEBUG assert parser.log.format == \"json\" def test_log_group_defaults(): class LogGroup(argclass.Group): level: int = argclass.LogLevel", "pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int] parser = Parser() parser.parse_args([]) assert", "Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int] parser = Parser()", "expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser() parser.parse_args([]) assert parser.foo == expected def test_nargs():", "str = argclass.Argument(\"-F\") class Parser(argclass.Parser): group = Group() parser = Parser() parser.parse_args([\"-F\", \"egg\"])", "assert parser.port == 9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int = argclass.Argument(required=True) config_path", "parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo == \"bar\" assert parser.http.host", "\"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is False def test_argument_defaults(): class Parser(argclass.Parser): debug: bool", "== \"bar\" def test_group(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\"", "fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required ==", "assert parser.foo == \"bar\" assert parser.http.host == \"0.0.0.0\" assert parser.http.port == 80 assert", "confused_default: bool = True pool_size: int = 4 forks: int = 2 parser", "pytest import argclass class TestBasics: class Parser(argclass.Parser): integers: List[int] = 
argclass.Argument( \"integers\", type=int,", "find the max)\", ) def test_simple(self): parser = self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert", "= argclass.LogLevel format = argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\" ) class Parser(argclass.Parser): log =", "Parser() parser.parse_args([]) assert parser.log.level == logging.INFO assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert", "assert parser.optional == 20 def test_minimal_required(tmp_path): class Parser(argclass.Parser): required: int parser = Parser()", "= expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser() parser.parse_args([]) assert parser.foo == expected def", "str bar: int = 0 parser = Parser() parser.print_help() captured = capsys.readouterr() assert", "int = 0 parser = Parser() parser.print_help() captured = capsys.readouterr() assert \"--foo\" in", "parser = Parser() parser.parse_args([]) assert parser.optional is None assert not parser.flag parser.parse_args([\"--flag\"]) assert", "http: HostPortGroup = HostPortGroup( title=\"HTTP host and port\", prefix=\"api\", defaults={ \"port\": 80, \"host\":", "assert parser.foo == \"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo", "def test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): foo: str bar: int = 0 parser =", "= capsys.readouterr() assert \"--foo\" in captured.out assert \"--bar\" in captured.out assert \"--help\" in", "uuid.uuid4().hex + uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser =", "r = repr(parser) assert r == \"<Parser: 1 arguments, 2 groups, 0 subparsers>\"", "assert \"[--bar BAR]\" in captured.out def test_print_log_level(capsys: pytest.CaptureFixture): 
class Parser(argclass.Parser): log_level: int =", "str port: int class Parser(argclass.Parser, AddressPort): pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert", "Parser(argclass.Parser): foo: str bar: int = 0 parser = Parser() parser.print_help() captured =", "spam: int = argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert", "optional: Optional[int] = argclass.Argument(required=False) parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=20\"])", "2, 3] class HostPortGroup(argclass.Group): host: str port: int class TestFoo: class Parser(argclass.Parser): foo:", "= argclass.Argument(required=True) config_path = tmp_path / \"config.ini\" with open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\")", "= Parser() parser.parse_args([]) assert parser.debug is False assert parser.confused_default is True assert parser.pool_size", "6000, \"host\": \"::\"}, ) def test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo", "int class TestFoo: class Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup(", "= self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers assert parser.integers == [1, 2, 3]", "argclass.Argument(nargs=\"*\") spam: int = argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"])", "(\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True for variant", "bool = True pool_size: int = 4 forks: int = 2 parser =", "class Parser(argclass.Parser): optional: Optional[int] parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=10\"])", "== \"json\" def test_log_group_defaults(): class 
LogGroup(argclass.Group): level: int = argclass.LogLevel format: str =", "Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\", converter=frozenset ) parser = Parser() parser.parse_args([\"--args-set\",", "parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3, 4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs:", "accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the integers (default: find the", "100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument(", "parser.pool_size == 4 assert parser.forks == 2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ])", "parser.grpc.host == \"::\" assert parser.grpc.port == 6000 def test_parser_repr(self): parser = self.Parser() r", "= tmp_path / \"config.ini\" with open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\")", "del os.environ['NARGS'] assert parser.nargs == frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs:", "assert r == \"<Parser: 1 arguments, 2 groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser", "= uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo", "\"spam\" assert parser.bar == 1 def test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): foo: str bar:", "\"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2,", "test_optional_type(): class Parser(argclass.Parser): flag: bool optional: Optional[bool] parser = Parser() 
parser.parse_args([]) assert parser.optional", "parser.group.foo == \"egg\" def test_short_parser_definition(): class Parser(argclass.Parser): foo: str bar: int parser =", "def test_access_to_not_parsed_attrs(self): parser = self.Parser() with pytest.raises(AttributeError): _ = parser.foo def test_environment(self, request:", "parser.parse_args([]) finally: del os.environ['NARGS'] assert parser.nargs == frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path): class", "for the accumulator\", ) accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the", "parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=20\"]) assert parser.optional == 20", "parser = Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address == \"0.0.0.0\" assert parser.port == 9876", "9000 def test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" assert parser.http.host", "def test_log_group_defaults(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format: str = argclass.Argument( choices=(\"json\",", "int = argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo", "frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\",", "== frozenset({1, 2, 3, 4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument(", "Optional[int] = argclass.Argument(required=False) parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=20\"]) assert", "assert \"--bar\" in captured.out assert \"--help\" in captured.out assert \"--foo FOO\" in captured.out", "test_nargs_and_converter(): class Parser(argclass.Parser): args_set: 
FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\", converter=frozenset ) parser =", "= \"[1, 2, 3]\" try: parser = Parser() parser.parse_args([]) finally: del os.environ['NARGS'] assert", "= self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo == expected def test_env_var(request: pytest.FixtureRequest): env_var = re.sub(r\"\\d+\",", "= argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\" ) class Parser(argclass.Parser): log = LogGroup() parser =", "\"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True for variant in (\"no\", \"crap\", \"false\",", "assert parser.optional == 10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False) parser", "= self.Parser() with pytest.raises(AttributeError): _ = parser.foo def test_environment(self, request: pytest.FixtureRequest): prefix =", "parser.optional == 20 def test_minimal_required(tmp_path): class Parser(argclass.Parser): required: int parser = Parser() with", "= argclass.Argument( type=int, nargs=\"*\", converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset)", "parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"])", "== 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser): args_set: FrozenSet[int] =", "= [1, 2, 3, 4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1,", "= argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert", "not parser.flag parser.parse_args([\"--flag\"]) assert parser.flag for 
variant in (\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\",", "Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address == \"0.0.0.0\" assert parser.port == 9876 def test_config_for_required(tmp_path):", "class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\", converter=frozenset ) parser = Parser()", "frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set ==", "\"bar\"]) assert parser.foo == \"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert", "parser.integers assert parser.integers == [1, 2, 3] class HostPortGroup(argclass.Group): host: str port: int", "\"--confused-default\", ]) assert parser.debug is True assert parser.confused_default is False assert parser.pool_size ==", "converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([])", "= argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file = tmp_path / \"config.ini\" with", "4 forks: int = 2 parser = Parser() parser.parse_args([]) assert parser.debug is False", "assert parser.nargs == frozenset({1, 2, 3, 4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int]", "== logging.ERROR assert parser.log.format == \"json\" def test_environment_required(): class Parser(argclass.Parser): required: int parser", "assert parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_1(): class Parser(argclass.Parser): args_set:", "re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\"))", "int = argclass.Argument(nargs=\"*\") spam: int = 
argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\", \"1\", \"2\",", "3, 4}\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3, 4})", "assert parser.required == 10 parser = Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class", "FOO\" in captured.out assert \"[--bar BAR]\" in captured.out def test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser):", "parser.nargs == frozenset({1, 2, 3, 4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] =", "captured.out assert \"[--bar BAR]\" in captured.out def test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): log_level: int", "help=\"an integer for the accumulator\", ) accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max,", "]) assert parser.debug is True assert parser.confused_default is False assert parser.pool_size == 2", "converter=frozenset, env_var=\"NARGS\" ) conf_file = tmp_path / \"config.ini\" with open(conf_file, \"w\") as fp:", "2, 3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset,", "isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set", "2, 3, 4, 5]) def test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int,", "as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2, 3, 4}\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([])", "\"[--bar BAR]\" in captured.out def test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): log_level: int = argclass.LogLevel", "def 
test_environment(self, request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected =", "flag: bool optional: Optional[bool] parser = Parser() parser.parse_args([]) assert parser.optional is None assert", "def test_simple(self): parser = self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers assert parser.integers ==", "assert parser.optional is None parser.parse_args([\"--optional=10\"]) assert parser.optional == 10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser):", "assert parser.log.level == logging.INFO assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level ==", "List, Optional, FrozenSet import pytest import argclass class TestBasics: class Parser(argclass.Parser): integers: List[int]", "\"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo == [1, 2] assert parser.bar == [3]", "assert parser.pool_size == 4 assert parser.forks == 2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\",", "class AddressPort(argclass.Group): address: str port: int class Parser(argclass.Parser, AddressPort): pass parser = Parser()", "HostPortGroup(argclass.Group): host: str port: int class TestFoo: class Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\")", "parser.parse_args([]) assert parser.debug is False assert parser.confused_default is True assert parser.pool_size == 4", "argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\" ) class Parser(argclass.Parser): log = LogGroup() parser = Parser()", "= Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert", "title=\"GRPC host and port\", defaults={\"port\": 6000, \"host\": \"::\"}, ) def test_simple(self): parser =", "title=\"HTTP host and port\", prefix=\"api\", 
defaults={ \"port\": 80, \"host\": \"0.0.0.0\", }, ) grpc:", "= argclass.Argument(nargs=\"*\") spam: int = argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\",", "None assert not parser.flag parser.parse_args([\"--flag\"]) assert parser.flag for variant in (\"yes\", \"Y\", \"yeS\",", "= Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo == [1, 2] assert", "foo: str = argclass.Argument(\"-F\") class Parser(argclass.Parser): group = Group() parser = Parser() parser.parse_args([\"-F\",", "Parser() parser.parse_args([]) finally: del os.environ['NARGS'] assert parser.nargs == frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path):", "argclass.Argument(\"-F\") class Parser(argclass.Parser): group = Group() parser = Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo", "parser.address == \"0.0.0.0\" assert parser.port == 9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int", "== frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set", "== [1, 2, 3] class HostPortGroup(argclass.Group): host: str port: int class TestFoo: class", "parser = Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo == [1, 2]", "with pytest.raises(AttributeError): _ = parser.foo def test_environment(self, request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\",", "1 def test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): foo: str bar: int = 0 parser", "= argclass.Argument( type=int, nargs=\"+\", converter=frozenset ) parser = Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\",", "frozenset({1, 2, 3, 4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int,", "logging.ERROR assert 
parser.log.format == \"json\" def test_environment_required(): class Parser(argclass.Parser): required: int parser =", "class Parser(argclass.Parser): flag: bool optional: Optional[bool] parser = Parser() parser.parse_args([]) assert parser.optional is", "os.environ.pop(env_var)) parser = Parser() parser.parse_args([]) assert parser.foo == expected def test_nargs(): class Parser(argclass.Parser):", "== \"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo == \"bar\"", "pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] =", "parser.optional is None parser.parse_args([\"--optional=10\"]) assert parser.optional == 10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional:", "== 10 parser = Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional:", "\"100\" parser.parse_args([]) assert parser.required == 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class", "argclass.Argument( type=int, nargs=\"+\", converter=frozenset ) parser = Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\",", "2, 3, 4}) def test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\",", "parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert", "nargs=1, converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set ==", "self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" assert parser.http.host == 
\"0.0.0.0\" assert parser.http.port ==", "parser.pool_size == 2 assert parser.forks == 8 def test_inheritance(): class AddressPort(argclass.Group): address: str", "parser.parse_args([]) assert parser.log.level == logging.INFO assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level", "type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for the accumulator\", ) accumulate = argclass.Argument( \"--sum\",", "in captured.out assert \"[--bar BAR]\" in captured.out def test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): log_level:", "parser.parse_args([]) assert parser.foo == expected def test_nargs(): class Parser(argclass.Parser): foo: List[int] = argclass.Argument(", "== logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING def test_optional_type(): class Parser(argclass.Parser): flag: bool", "parser.parse_args([\"--optional=20\"]) assert parser.optional == 20 def test_minimal_required(tmp_path): class Parser(argclass.Parser): required: int parser =", "\"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True for variant in (\"no\", \"crap\", \"false\", \"disabled\",", "Parser() parser.parse_args([]) assert parser.log.level == logging.ERROR assert parser.log.format == \"json\" def test_environment_required(): class", "class Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda:", "expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert", "\"bar\" assert parser.http.host == \"0.0.0.0\" assert parser.http.port == 80 assert parser.grpc.host == \"::\"", "assert parser.foo == \"bar\" 
parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" def test_group(self): parser =", ") def test_simple(self): parser = self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers assert parser.integers", "(default: find the max)\", ) def test_simple(self): parser = self.Parser() parser.parse_args([\"1\", \"2\", \"3\"])", "os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo == expected def test_env_var(request: pytest.FixtureRequest): env_var", "parser = self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers assert parser.integers == [1, 2,", "== \"json\" def test_environment_required(): class Parser(argclass.Parser): required: int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] =", "LogGroup(argclass.Group): level: int = argclass.LogLevel format: str = argclass.Argument( choices=(\"json\", \"stream\") ) class", "\"bar\"]) assert parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" def test_group(self): parser", "assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING def test_optional_type(): class Parser(argclass.Parser):", "pass parser = Parser() parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert parser.address == \"0.0.0.0\" assert parser.port ==", "class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file =", "= Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING", "prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected", "repr(parser) assert r == \"<Parser: 1 arguments, 2 
groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self):", "request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"]", "\"stream\"), default=\"stream\" ) class Parser(argclass.Parser): log = LogGroup() parser = Parser() parser.parse_args([]) assert", "converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1, 2, 3]\" try: parser = Parser() parser.parse_args([])", "\"config.ini\" with open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser =", "prefix=\"api\", defaults={ \"port\": 80, \"host\": \"0.0.0.0\", }, ) grpc: HostPortGroup = HostPortGroup( title=\"GRPC", "\"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True for variant in (\"no\", \"crap\",", "\"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert parser.debug is True assert parser.confused_default is False assert", "captured.out assert \"--foo FOO\" in captured.out assert \"[--bar BAR]\" in captured.out def test_print_log_level(capsys:", "Parser() parser.parse_args([]) assert parser.optional is None assert not parser.flag parser.parse_args([\"--flag\"]) assert parser.flag for", "True pool_size: int = 4 forks: int = 2 parser = Parser() parser.parse_args([])", "format: str = argclass.Argument( choices=(\"json\", \"stream\") ) class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\"))", "Parser(argclass.Parser): log = LogGroup() parser = Parser() parser.parse_args([]) assert parser.log.level == logging.INFO assert", "\"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2, 3, 4}\\n\") parser = Parser(config_files=[conf_file])", "parser.foo == expected def test_env_var(request: pytest.FixtureRequest): env_var = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper()", "expected def 
test_env_var(request: pytest.FixtureRequest): env_var = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser):", ") class Parser(argclass.Parser): log = LogGroup() parser = Parser() parser.parse_args([]) assert parser.log.level ==", ") parser = Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset)", "\"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is False def test_argument_defaults(): class Parser(argclass.Parser):", "parser.integers == [1, 2, 3] class HostPortGroup(argclass.Group): host: str port: int class TestFoo:", "captured.out assert \"--help\" in captured.out assert \"--foo FOO\" in captured.out assert \"[--bar BAR]\"", "str = argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser =", "parser = Parser() parser.parse_args([]) assert parser.foo == expected def test_nargs(): class Parser(argclass.Parser): foo:", "assert parser.optional is False def test_argument_defaults(): class Parser(argclass.Parser): debug: bool = False confused_default:", "parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port == 9000 def test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"])", "assert parser.foo == \"spam\" assert parser.bar == 1 def test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser):", "parser.parse_args([\"--log-level=warning\"]) assert parser.log_level == logging.WARNING def test_optional_type(): class Parser(argclass.Parser): flag: bool optional: Optional[bool]", "frozenset) assert parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser):", "bool optional: Optional[bool] parser = Parser() parser.parse_args([]) assert parser.optional is None assert 
not", "\"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True for variant in", "required: int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert parser.required == 100", "class HostPortGroup(argclass.Group): host: str port: int class TestFoo: class Parser(argclass.Parser): foo: str =", "class TestBasics: class Parser(argclass.Parser): integers: List[int] = argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an", "True assert parser.pool_size == 4 assert parser.forks == 2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\",", "format = argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\" ) class Parser(argclass.Parser): log = LogGroup() parser", "groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser = self.Parser() with pytest.raises(AttributeError): _ = parser.foo", "self.Parser() r = repr(parser) assert r == \"<Parser: 1 arguments, 2 groups, 0", "\"<Parser: 1 arguments, 2 groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser = self.Parser() with", "and port\", defaults={\"port\": 6000, \"host\": \"::\"}, ) def test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\",", "parser.debug is True assert parser.confused_default is False assert parser.pool_size == 2 assert parser.forks", "Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo == \"spam\" assert parser.bar == 1 def test_print_help(capsys:", "captured.out def test_print_log_level(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): log_level: int = argclass.LogLevel parser = Parser()", "assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_and_converter_not_required():", "parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is False def 
test_argument_defaults(): class Parser(argclass.Parser): debug: bool = False", "with open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path])", "argclass.Argument( choices=(\"json\", \"stream\") ) class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser()", "is True assert parser.pool_size == 4 assert parser.forks == 2 parser.parse_args([ \"--debug\", \"--forks=8\",", "defaults={ \"port\": 80, \"host\": \"0.0.0.0\", }, ) grpc: HostPortGroup = HostPortGroup( title=\"GRPC host", "= self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\"", "os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo == expected", "fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required == 10 parser", "class Parser(argclass.Parser): foo: str bar: int parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo", "import List, Optional, FrozenSet import pytest import argclass class TestBasics: class Parser(argclass.Parser): integers:", "is None parser.parse_args([\"--optional=20\"]) assert parser.optional == 20 def test_minimal_required(tmp_path): class Parser(argclass.Parser): required: int", "os.environ['NARGS'] assert parser.nargs == frozenset({1, 2, 3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int]", "\"egg\"]) assert parser.group.foo == \"egg\" def test_short_parser_definition(): class Parser(argclass.Parser): foo: str bar: int", "captured.out assert \"--bar\" in captured.out assert \"--help\" in captured.out assert \"--foo FOO\" in", 
"test_log_group_defaults(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format: str = argclass.Argument( choices=(\"json\", \"stream\")", "= argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the integers (default: find the max)\",", "nargs=\"*\", converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set ==", "def test_group(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([ \"--foo=bar\",", "FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file = tmp_path / \"config.ini\"", "== expected def test_nargs(): class Parser(argclass.Parser): foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, )", "in (\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is False def test_argument_defaults():", "False assert parser.confused_default is True assert parser.pool_size == 4 assert parser.forks == 2", "assert parser.forks == 2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert parser.debug is", "import pytest import argclass class TestBasics: class Parser(argclass.Parser): integers: List[int] = argclass.Argument( \"integers\",", "import re import uuid from typing import List, Optional, FrozenSet import pytest import", "class LogGroup(argclass.Group): level: int = argclass.LogLevel format: str = argclass.Argument( choices=(\"json\", \"stream\") )", "3, 4, 5]) def test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=1,", "request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo == expected def 
test_env_var(request: pytest.FixtureRequest):", "parser.optional is False def test_argument_defaults(): class Parser(argclass.Parser): debug: bool = False confused_default: bool", "assert parser.address == \"0.0.0.0\" assert parser.port == 9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser): required:", "the accumulator\", ) accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the integers", "[3] assert parser.spam == [4] def test_group_aliases(): class Group(argclass.Group): foo: str = argclass.Argument(\"-F\")", "= HostPortGroup( title=\"HTTP host and port\", prefix=\"api\", defaults={ \"port\": 80, \"host\": \"0.0.0.0\", },", "is True assert parser.confused_default is False assert parser.pool_size == 2 assert parser.forks ==", "False def test_argument_defaults(): class Parser(argclass.Parser): debug: bool = False confused_default: bool = True", "from typing import List, Optional, FrozenSet import pytest import argclass class TestBasics: class", "== 8080 assert parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port == 9000 def test_group_defaults(self): parser", "= Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo == \"spam\" assert parser.bar == 1 def", "foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int = argclass.Argument(nargs=\"*\") spam: int", "parser.optional is None assert not parser.flag parser.parse_args([\"--flag\"]) assert parser.flag for variant in (\"yes\",", ") class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser() parser.parse_args([]) assert parser.log.level", "== 1 def test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): foo: str bar: int = 0", "\"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for the accumulator\", ) accumulate = argclass.Argument(", 
"parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" def test_group(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert", "parser.bar == [3] assert parser.spam == [4] def test_group_aliases(): class Group(argclass.Group): foo: str", "Parser(argclass.Parser): group = Group() parser = Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo == \"egg\"", "\"::\"}, ) def test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\"", "= argclass.Argument( choices=(\"json\", \"stream\") ) class Parser(argclass.Parser): log = LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser =", "== \"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" def test_group(self): parser = self.Parser() parser.parse_args([\"--foo\",", "\"::\" assert parser.grpc.port == 6000 def test_parser_repr(self): parser = self.Parser() r = repr(parser)", "parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo == expected def test_env_var(request: pytest.FixtureRequest): env_var =", "foo: str = argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup( title=\"HTTP host and port\", prefix=\"api\",", "test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): foo: str bar: int = 0 parser = Parser()", "capsys.readouterr() assert \"--foo\" in captured.out assert \"--bar\" in captured.out assert \"--help\" in captured.out", "\"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is False def test_argument_defaults(): class Parser(argclass.Parser): debug:", "Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required == 10 parser = Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def", "== 2 assert parser.forks == 8 def test_inheritance(): class AddressPort(argclass.Group): address: str port:", 
"parser = self.Parser() with pytest.raises(AttributeError): _ = parser.foo def test_environment(self, request: pytest.FixtureRequest): prefix", "= False confused_default: bool = True pool_size: int = 4 forks: int =", "== 2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert parser.debug is True assert", "def test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"]) assert", "2 groups, 0 subparsers>\" def test_access_to_not_parsed_attrs(self): parser = self.Parser() with pytest.raises(AttributeError): _ =", "captured = capsys.readouterr() assert \"--foo\" in captured.out assert \"--bar\" in captured.out assert \"--help\"", "10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required == 10 parser = Parser(config_files=[])", "== \"::\" assert parser.grpc.port == 6000 def test_parser_repr(self): parser = self.Parser() r =", "the max)\", ) def test_simple(self): parser = self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers", "argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level", "self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers assert parser.integers == [1, 2, 3] class", "parser.http.port == 8080 assert parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port == 9000 def test_group_defaults(self):", "test_nargs_config_set(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file", "parser = Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required == 10 parser = Parser(config_files=[]) with pytest.raises(SystemExit):", "test_minimal_required(tmp_path): class 
Parser(argclass.Parser): required: int parser = Parser() with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"]) assert", "parser.log.format == \"json\" def test_environment_required(): class Parser(argclass.Parser): required: int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED']", "fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2, 3, 4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs", "host: str port: int class TestFoo: class Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\") http:", "parser.debug is False assert parser.confused_default is True assert parser.pool_size == 4 assert parser.forks", "class Parser(argclass.Parser): required: int = argclass.Argument(required=True) config_path = tmp_path / \"config.ini\" with open(config_path,", "str = argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup( title=\"HTTP host and port\", prefix=\"api\", defaults={", "assert parser.foo == expected def test_nargs(): class Parser(argclass.Parser): foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE,", "parser = Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set, frozenset) assert", "nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file = tmp_path /", "request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser() parser.parse_args([]) assert parser.foo == expected def test_nargs(): class", "\"--help\" in captured.out assert \"--foo FOO\" in captured.out assert \"[--bar BAR]\" in captured.out", "int = argclass.LogLevel parser = Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"])", "parser.parse_args([]) assert parser.foo == expected def test_env_var(request: pytest.FixtureRequest): env_var = 
re.sub(r\"\\d+\", \"\", uuid.uuid4().hex", "parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo == [1, 2] assert parser.bar ==", "level: int = argclass.LogLevel format = argclass.Argument( choices=(\"json\", \"stream\"), default=\"stream\" ) class Parser(argclass.Parser):", "assert parser.forks == 8 def test_inheritance(): class AddressPort(argclass.Group): address: str port: int class", "parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" def test_group(self): parser = self.Parser()", "= LogGroup() parser = Parser() parser.parse_args([]) assert parser.log.level == logging.INFO assert parser.log.format ==", "parser.log.format == \"json\" def test_log_group_defaults(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format: str", "= parser.foo def test_environment(self, request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper()", "assert parser.debug is False assert parser.confused_default is True assert parser.pool_size == 4 assert", "level: int = argclass.LogLevel format: str = argclass.Argument( choices=(\"json\", \"stream\") ) class Parser(argclass.Parser):", "assert \"--foo FOO\" in captured.out assert \"[--bar BAR]\" in captured.out def test_print_log_level(capsys: pytest.CaptureFixture):", "\"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo == \"bar\" assert parser.http.host == \"127.0.0.1\" assert parser.http.port", "const=sum, default=max, help=\"sum the integers (default: find the max)\", ) def test_simple(self): parser", "for variant in (\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is", "parser.print_help() captured = capsys.readouterr() assert \"--foo\" in captured.out assert \"--bar\" in captured.out assert", ") grpc: HostPortGroup = HostPortGroup( title=\"GRPC host and port\", defaults={\"port\": 6000, \"host\": 
\"::\"},", "Parser(argclass.Parser): foo: List[int] = argclass.Argument( nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int = argclass.Argument(nargs=\"*\") spam:", "assert parser.flag for variant in (\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert", "Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False) parser = Parser() parser.parse_args([]) assert parser.optional is None", "\"host\": \"0.0.0.0\", }, ) grpc: HostPortGroup = HostPortGroup( title=\"GRPC host and port\", defaults={\"port\":", "def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False) parser = Parser() parser.parse_args([]) assert", "os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert parser.required == 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def", "class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=1, converter=frozenset ) parser = Parser()", "argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for the accumulator\", ) accumulate =", "type=int, ) bar: int = argclass.Argument(nargs=\"*\") spam: int = argclass.Argument(nargs=1) parser = Parser()", "2 assert parser.forks == 8 def test_inheritance(): class AddressPort(argclass.Group): address: str port: int", "test_environment_required(): class Parser(argclass.Parser): required: int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert", "= re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda:", "def test_environment_required(): class Parser(argclass.Parser): required: int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] 
= \"100\" parser.parse_args([])", "frozenset([1, 2, 3, 4, 5]) def test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument(", "def test_group_defaults(self): parser = self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" assert parser.http.host ==", "parser.confused_default is False assert parser.pool_size == 2 assert parser.forks == 8 def test_inheritance():", "parser.parse_args([\"--flag\"]) assert parser.flag for variant in (\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"])", "assert parser.http.host == \"127.0.0.1\" assert parser.http.port == 8080 assert parser.grpc.host == \"127.0.0.2\" assert", "def test_short_parser_definition(): class Parser(argclass.Parser): foo: str bar: int parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"])", "parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert parser.debug is True assert parser.confused_default is", "default=\"stream\" ) class Parser(argclass.Parser): log = LogGroup() parser = Parser() parser.parse_args([]) assert parser.log.level", "nargs=\"+\", converter=frozenset ) parser = Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert", "test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=1, converter=frozenset ) parser =", "parser.grpc.port == 6000 def test_parser_repr(self): parser = self.Parser() r = repr(parser) assert r", "8 def test_inheritance(): class AddressPort(argclass.Group): address: str port: int class Parser(argclass.Parser, AddressPort): pass", "== \"bar\" assert parser.http.host == \"0.0.0.0\" assert parser.http.port == 80 assert parser.grpc.host ==", "== \"bar\" assert parser.http.host == \"127.0.0.1\" assert parser.http.port == 8080 assert parser.grpc.host ==", "argclass class TestBasics: class Parser(argclass.Parser): integers: List[int] = 
argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\",", "parser.http.port == 80 assert parser.grpc.host == \"::\" assert parser.grpc.port == 6000 def test_parser_repr(self):", "\"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo == [1, 2] assert parser.bar == [3] assert", "logging.WARNING def test_optional_type(): class Parser(argclass.Parser): flag: bool optional: Optional[bool] parser = Parser() parser.parse_args([])", "\"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the integers (default: find the max)\", ) def", "argclass.Argument( type=int, nargs=1, converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert", "test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([\"--foo=bar\"]) assert parser.foo", "3}) def test_nargs_config_list(tmp_path): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\"", "2 parser = Parser() parser.parse_args([]) assert parser.debug is False assert parser.confused_default is True", "HostPortGroup = HostPortGroup( title=\"GRPC host and port\", defaults={\"port\": 6000, \"host\": \"::\"}, ) def", "variant in (\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True", "parser.confused_default is True assert parser.pool_size == 4 assert parser.forks == 2 parser.parse_args([ \"--debug\",", "subparsers>\" def test_access_to_not_parsed_attrs(self): parser = self.Parser() with pytest.raises(AttributeError): _ = parser.foo def test_environment(self,", "+ uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\")", 
"Parser(argclass.Parser): foo: str bar: int parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo ==", "\"--bar\" in captured.out assert \"--help\" in captured.out assert \"--foo FOO\" in captured.out assert", "parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True for variant in (\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"):", "integers (default: find the max)\", ) def test_simple(self): parser = self.Parser() parser.parse_args([\"1\", \"2\",", "= Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\", \"2\",", "3] class HostPortGroup(argclass.Group): host: str port: int class TestFoo: class Parser(argclass.Parser): foo: str", "\"0.0.0.0\" assert parser.http.port == 80 assert parser.grpc.host == \"::\" assert parser.grpc.port == 6000", "uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([])", "\"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() expected = uuid.uuid4().hex os.environ[f\"{prefix}_FOO\"] = expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser", "assert parser.log.level == logging.DEBUG assert parser.log.format == \"json\" def test_log_group_defaults(): class LogGroup(argclass.Group): level:", "converter=frozenset ) parser = Parser() parser.parse_args([\"--args-set\", \"1\", \"2\", \"3\", \"4\", \"5\"]) assert isinstance(parser.args_set,", "= uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var)) parser = Parser() parser.parse_args([]) assert parser.foo", "parser.optional is True for variant in (\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert", 
"debug: bool = False confused_default: bool = True pool_size: int = 4 forks:", "assert parser.debug is True assert parser.confused_default is False assert parser.pool_size == 2 assert", "assert parser.log.format == \"json\" def test_environment_required(): class Parser(argclass.Parser): required: int parser = Parser(auto_env_var_prefix=\"TEST_\")", "argclass.Argument( type=int, nargs=\"*\", converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set, frozenset) assert", "2, 3, 4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert parser.nargs == frozenset({1, 2, 3,", "typing import List, Optional, FrozenSet import pytest import argclass class TestBasics: class Parser(argclass.Parser):", "nargs=argclass.Nargs.ONE_OR_MORE, metavar=\"N\", help=\"an integer for the accumulator\", ) accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST,", "None parser.parse_args([\"--optional=20\"]) assert parser.optional == 20 def test_minimal_required(tmp_path): class Parser(argclass.Parser): required: int parser", "assert parser.bar == 1 def test_print_help(capsys: pytest.CaptureFixture): class Parser(argclass.Parser): foo: str bar: int", "parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_and_converter_not_required(): class Parser(argclass.Parser): args_set: FrozenSet[int]", "\"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo == \"bar\" assert", "max)\", ) def test_simple(self): parser = self.Parser() parser.parse_args([\"1\", \"2\", \"3\"]) assert parser.integers assert", "logging import os import re import uuid from typing import List, Optional, FrozenSet", "= argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup( title=\"HTTP host and port\", prefix=\"api\", defaults={ \"port\":", "== \"spam\" assert parser.bar == 1 def test_print_help(capsys: 
pytest.CaptureFixture): class Parser(argclass.Parser): foo: str", "tmp_path / \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2,", "Optional[int] parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=10\"]) assert parser.optional ==", "parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level == logging.DEBUG assert parser.log.format == \"json\" def test_log_group_defaults(): class", "uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var]", "class Parser(argclass.Parser): args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset ) parser = Parser()", "expected request.addfinalizer(lambda: os.environ.pop(f\"{prefix}_FOO\")) parser = self.Parser(auto_env_var_prefix=f\"{prefix}_\") parser.parse_args([]) assert parser.foo == expected def test_env_var(request:", "int parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo == \"spam\" assert parser.bar ==", "assert parser.log_level == logging.WARNING def test_optional_type(): class Parser(argclass.Parser): flag: bool optional: Optional[bool] parser", "required: int = argclass.Argument(required=True) config_path = tmp_path / \"config.ini\" with open(config_path, \"w\") as", "Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup( title=\"HTTP host and port\",", "test_access_to_not_parsed_attrs(self): parser = self.Parser() with pytest.raises(AttributeError): _ = parser.foo def test_environment(self, request: pytest.FixtureRequest):", "Parser() parser.print_help() captured = capsys.readouterr() assert \"--foo\" in captured.out assert \"--bar\" in captured.out", "foo: str bar: int parser = Parser() parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo == 
\"spam\"", "assert isinstance(parser.args_set, frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset) assert", "Parser(argclass.Parser): required: int parser = Parser(auto_env_var_prefix=\"TEST_\") os.environ['TEST_REQUIRED'] = \"100\" parser.parse_args([]) assert parser.required ==", "level=\"error\")) parser = Parser() parser.parse_args([]) assert parser.log.level == logging.ERROR assert parser.log.format == \"json\"", "def test_nargs_env_var(): class Parser(argclass.Parser): nargs: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" )", "= Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo == \"egg\" def test_short_parser_definition(): class Parser(argclass.Parser): foo:", "FrozenSet[int] = argclass.Argument( type=int, nargs=1, converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set,", "= argclass.Argument(required=False) parser = Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=20\"]) assert parser.optional", "HostPortGroup( title=\"HTTP host and port\", prefix=\"api\", defaults={ \"port\": 80, \"host\": \"0.0.0.0\", }, )", "= tmp_path / \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1,", "parser = Parser() parser.parse_args([\"--log-level\", \"info\"]) assert parser.log_level == logging.INFO parser.parse_args([\"--log-level=warning\"]) assert parser.log_level ==", "import argclass class TestBasics: class Parser(argclass.Parser): integers: List[int] = argclass.Argument( \"integers\", type=int, nargs=argclass.Nargs.ONE_OR_MORE,", "== 4 assert parser.forks == 2 parser.parse_args([ \"--debug\", \"--forks=8\", \"--pool-size=2\", \"--confused-default\", ]) assert", "assert parser.pool_size == 2 assert parser.forks == 8 def test_inheritance(): class AddressPort(argclass.Group): 
address:", "test_group(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\",", "= Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int] parser =", "def test_minimal_required(tmp_path): class Parser(argclass.Parser): required: int parser = Parser() with pytest.raises(SystemExit): parser.parse_args([]) parser.parse_args([\"--required=20\"])", "FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset ) parser = Parser() parser.parse_args([]) assert isinstance(parser.args_set,", "\"--bar=1\"]) assert parser.foo == \"spam\" assert parser.bar == 1 def test_print_help(capsys: pytest.CaptureFixture): class", "== [1, 2] assert parser.bar == [3] assert parser.spam == [4] def test_group_aliases():", "os import re import uuid from typing import List, Optional, FrozenSet import pytest", "= 4 forks: int = 2 parser = Parser() parser.parse_args([]) assert parser.debug is", "group = Group() parser = Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo == \"egg\" def", "Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex os.environ[env_var] = expected request.addfinalizer(lambda: os.environ.pop(env_var))", "assert parser.confused_default is True assert parser.pool_size == 4 assert parser.forks == 2 parser.parse_args([", "parser = Parser(config_files=[]) with pytest.raises(SystemExit): parser.parse_args([]) def test_minimal_optional(tmp_path): class Parser(argclass.Parser): optional: Optional[int] parser", "class Parser(argclass.Parser): foo: str bar: int = 0 parser = Parser() parser.print_help() captured", "parser = Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo == \"egg\" def test_short_parser_definition(): class Parser(argclass.Parser):", "port\", 
defaults={\"port\": 6000, \"host\": \"::\"}, ) def test_simple(self): parser = self.Parser() parser.parse_args([\"--foo\", \"bar\"])", "frozenset) assert parser.args_set == frozenset([]) parser.parse_args([\"--args-set\", \"1\"]) assert isinstance(parser.args_set, frozenset) assert parser.args_set ==", "import os import re import uuid from typing import List, Optional, FrozenSet import", "Optional, FrozenSet import pytest import argclass class TestBasics: class Parser(argclass.Parser): integers: List[int] =", "parser.parse_args([]) assert parser.required == 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter(): class Parser(argclass.Parser):", "argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the integers (default: find the max)\", )", "FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1, 2, 3]\"", "\"port\": 80, \"host\": \"0.0.0.0\", }, ) grpc: HostPortGroup = HostPortGroup( title=\"GRPC host and", "\"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\", ]) assert parser.foo == \"bar\" assert parser.http.host ==", "argclass.Argument( type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) conf_file = tmp_path / \"config.ini\" with open(conf_file,", "type=int, nargs=\"*\", converter=frozenset, env_var=\"NARGS\" ) os.environ['NARGS'] = \"[1, 2, 3]\" try: parser =", "== 10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False) parser = Parser()", "parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" assert parser.http.host == \"0.0.0.0\" assert parser.http.port == 80", "20 def test_log_group(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format = argclass.Argument( choices=(\"json\",", "pool_size: int = 
4 forks: int = 2 parser = Parser() parser.parse_args([]) assert", "test_parser_repr(self): parser = self.Parser() r = repr(parser) assert r == \"<Parser: 1 arguments,", "8080 assert parser.grpc.host == \"127.0.0.2\" assert parser.grpc.port == 9000 def test_group_defaults(self): parser =", ") os.environ['NARGS'] = \"[1, 2, 3]\" try: parser = Parser() parser.parse_args([]) finally: del", "open(config_path, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([])", ") bar: int = argclass.Argument(nargs=\"*\") spam: int = argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\",", "in (\"yes\", \"Y\", \"yeS\", \"enable\", \"ENABLED\", \"1\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional is True for", "}, ) grpc: HostPortGroup = HostPortGroup( title=\"GRPC host and port\", defaults={\"port\": 6000, \"host\":", "try: parser = Parser() parser.parse_args([]) finally: del os.environ['NARGS'] assert parser.nargs == frozenset({1, 2,", "str port: int class TestFoo: class Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\") http: HostPortGroup", "is True for variant in (\"no\", \"crap\", \"false\", \"disabled\", \"MY_HANDS_TYPING_WORDS\"): parser.parse_args([f\"--optional={variant}\"]) assert parser.optional", "LogGroup(defaults=dict(format=\"json\", level=\"error\")) parser = Parser() parser.parse_args([]) assert parser.log.level == logging.ERROR assert parser.log.format ==", "assert parser.integers assert parser.integers == [1, 2, 3] class HostPortGroup(argclass.Group): host: str port:", "= \"100\" parser.parse_args([]) assert parser.required == 100 os.environ.pop('TEST_REQUIRED') with pytest.raises(SystemExit): parser.parse_args([]) def test_nargs_and_converter():", "conf_file = tmp_path / \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs =", "def test_env_var(request: 
pytest.FixtureRequest): env_var = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo:", "args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"+\", converter=frozenset ) parser = Parser() parser.parse_args([\"--args-set\", \"1\",", "6000 def test_parser_repr(self): parser = self.Parser() r = repr(parser) assert r == \"<Parser:", "parser.parse_args([\"--optional=10\"]) assert parser.optional == 10 def test_optional_is_not_required(tmp_path): class Parser(argclass.Parser): optional: Optional[int] = argclass.Argument(required=False)", "def test_inheritance(): class AddressPort(argclass.Group): address: str port: int class Parser(argclass.Parser, AddressPort): pass parser", "= self.Parser() parser.parse_args([\"--foo=bar\"]) assert parser.foo == \"bar\" assert parser.http.host == \"0.0.0.0\" assert parser.http.port", "as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required", "parser.forks == 8 def test_inheritance(): class AddressPort(argclass.Group): address: str port: int class Parser(argclass.Parser,", "Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=20\"]) assert parser.optional == 20 def test_minimal_required(tmp_path):", "assert parser.required == 20 def test_log_group(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format", "9876 def test_config_for_required(tmp_path): class Parser(argclass.Parser): required: int = argclass.Argument(required=True) config_path = tmp_path /", "Parser() parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=10\"]) assert parser.optional == 10 def test_optional_is_not_required(tmp_path):", "in captured.out assert \"--foo FOO\" in captured.out assert \"[--bar BAR]\" in captured.out def", "parser.parse_args([\"--address=0.0.0.0\", \"--port=9876\"]) assert 
parser.address == \"0.0.0.0\" assert parser.port == 9876 def test_config_for_required(tmp_path): class", "open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2, 3, 4]\\n\") parser =", "class Parser(argclass.Parser): foo: str = argclass.Argument(help=\"foo\") http: HostPortGroup = HostPortGroup( title=\"HTTP host and", "_ = parser.foo def test_environment(self, request: pytest.FixtureRequest): prefix = re.sub(r\"\\d+\", \"\", uuid.uuid4().hex +", "\"\", uuid.uuid4().hex + uuid.uuid4().hex).upper() class Parser(argclass.Parser): foo: str = argclass.Argument(env_var=env_var) expected = uuid.uuid4().hex", "== 20 def test_log_group(): class LogGroup(argclass.Group): level: int = argclass.LogLevel format = argclass.Argument(", "= True pool_size: int = 4 forks: int = 2 parser = Parser()", "== [4] def test_group_aliases(): class Group(argclass.Group): foo: str = argclass.Argument(\"-F\") class Parser(argclass.Parser): group", "accumulator\", ) accumulate = argclass.Argument( \"--sum\", action=argclass.Actions.STORE_CONST, const=sum, default=max, help=\"sum the integers (default:", "nargs=argclass.Nargs.ZERO_OR_MORE, type=int, ) bar: int = argclass.Argument(nargs=\"*\") spam: int = argclass.Argument(nargs=1) parser =", "Parser() parser.parse_args([\"-F\", \"egg\"]) assert parser.group.foo == \"egg\" def test_short_parser_definition(): class Parser(argclass.Parser): foo: str", "parser.parse_args([\"--foo=spam\", \"--bar=1\"]) assert parser.foo == \"spam\" assert parser.bar == 1 def test_print_help(capsys: pytest.CaptureFixture):", "self.Parser() parser.parse_args([\"--foo\", \"bar\"]) assert parser.foo == \"bar\" parser.parse_args([ \"--foo=bar\", \"--api-host=127.0.0.1\", \"--api-port=8080\", \"--grpc-host=127.0.0.2\", \"--grpc-port=9000\",", "== 8 def test_inheritance(): class AddressPort(argclass.Group): address: str port: int class Parser(argclass.Parser, AddressPort):", "args_set: FrozenSet[int] = argclass.Argument( type=int, nargs=\"*\", 
converter=frozenset ) parser = Parser() parser.parse_args([]) assert", "parser = Parser() parser.parse_args([]) assert parser.log.level == logging.ERROR assert parser.log.format == \"json\" def", "parser.log.level == logging.INFO assert parser.log.format == \"stream\" parser.parse_args([\"--log-level=debug\", \"--log-format=json\"]) assert parser.log.level == logging.DEBUG", "/ \"config.ini\" with open(conf_file, \"w\") as fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = {1, 2, 3,", "fp.write(\"[DEFAULT]\\n\") fp.write(\"required = 10\\n\") fp.write(\"\\n\") parser = Parser(config_files=[config_path]) parser.parse_args([]) assert parser.required == 10", "fp: fp.write(\"[DEFAULT]\\n\") fp.write(\"nargs = [1, 2, 3, 4]\\n\") parser = Parser(config_files=[conf_file]) parser.parse_args([]) assert", "parser.parse_args([]) assert parser.optional is None parser.parse_args([\"--optional=20\"]) assert parser.optional == 20 def test_minimal_required(tmp_path): class", "\"--pool-size=2\", \"--confused-default\", ]) assert parser.debug is True assert parser.confused_default is False assert parser.pool_size", "argclass.Argument(nargs=1) parser = Parser() parser.parse_args([\"--foo\", \"1\", \"2\", \"--bar=3\", \"--spam=4\"]) assert parser.foo == [1,", "parser.args_set == frozenset([1, 2, 3, 4, 5]) def test_nargs_1(): class Parser(argclass.Parser): args_set: FrozenSet[int]" ]
[ "= self.create_topic() type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count())", "association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types())", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0,", "= association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2", "new topic maps to be created with no ' + 'associations') association =", "for the Association model. 
Most if not all of these tests are ported", "association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count())", "no roles in a newly created association') role_type = self.create_topic() player = self.create_topic()", "association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def test_role_creation_invalid_type (self): association", "association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self): association = self.create_association()", "self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1", "with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). \"\"\" from tmapi.exceptions import ModelConstraintException from tmapi_test_case", "this file except in compliance with the License. # You may obtain a", "self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def test_role_creation_invalid_type (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException,", "under the License. \"\"\"Module containing tests for the Association model. 
Most if not", "association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2))", "= self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic maps to be created with", "size to decrement for ' + 'topic map') def test_role_creation (self): association =", "self.create_topic() type2 = self.create_topic() unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count())", "self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in", "get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association list size to decrement for ' +", "association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove()", "(self): # This test is not applicable in this implementation. 
pass def test_role_creation_invalid_player", "role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def", "pass def test_role_creation_invalid_player (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None)", "ANY KIND, either express or implied. # See the License for the specific", "in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in", "self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2,", "association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count())", "in association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in", "test_role_creation (self): association = self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no roles in a newly", "association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2 = 
association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count())", "self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1))", "def test_parent (self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic maps", "association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3 = association.create_role(type2,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "= self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no roles in a newly created association') role_type", "self.assertTrue(association in parent.get_associations(), 'Association is not part of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected", "model. 
Most if not all of these tests are ported from the public", "association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self):", "association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in association.get_role_types())", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def test_role_creation_invalid_type (self): association = self.create_association()", "and # limitations under the License. \"\"\"Module containing tests for the Association model.", "tests for the Association model. Most if not all of these tests are", "the License. \"\"\"Module containing tests for the Association model. Most if not all", "self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self): # This test is not applicable in this", "OF ANY KIND, either express or implied. 
# See the License for the", "role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove()", "self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count())", "self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2", "maps to be created with no ' + 'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent,", "from the public domain tests that come with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/).", "role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic())", "License. \"\"\"Module containing tests for the Association model. 
Most if not all of", "self.create_topic() unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1,", "map') def test_role_creation (self): association = self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no roles in", "of these tests are ported from the public domain tests that come with", "role.get_player(), 'Unexpected role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def test_role_types (self): association", "'Expected association list size to decrement for ' + 'topic map') def test_role_creation", "association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 =", "' + 'association parent after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association list size to", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "association.get_roles().count(), 'Expected no roles in a newly created association') role_type = self.create_topic() player", "'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected ' + 'association parent after creation')", "self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0,", "specific language governing permissions and # limitations 
under the License. \"\"\"Module containing tests", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "tests that come with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). \"\"\" from tmapi.exceptions import", "<NAME> (<EMAIL>) # # Licensed under the Apache License, Version 2.0 (the \"License\");", "\"\"\"Module containing tests for the Association model. Most if not all of these", "self.assertTrue(role2 in association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3", "in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "'Expected no roles in a newly created association') role_type = self.create_topic() player =", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "(self): association = self.create_association() type1 = self.create_topic() type2 = self.create_topic() unused_type = self.create_topic()", "all of these tests are ported from the public domain tests that come", "required by applicable law or agreed to in writing, software # distributed under", "self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove()", "role_type = self.create_topic() player = self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role = 
association.create_role(role_type, player) self.assertEqual(role_type,", "is not part of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association list size to", "applicable law or agreed to in writing, software # distributed under the License", "self.create_topic() type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1", "player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def test_role_types (self): association = self.create_association() type1", "'topic map') def test_role_creation (self): association = self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no roles", "in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2", "in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count())", "type') self.assertEqual(player, role.get_player(), 'Unexpected role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def test_role_types", "or agreed to in writing, software # distributed under the License is distributed", "public domain tests that come with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). 
\"\"\" from", "self.assertTrue(type1 in association.get_role_types()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2", "= self.create_topic() type2 = self.create_topic() unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0,", "+ 'topic map') def test_role_creation (self): association = self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3 = association.create_role(type2,", "role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count())", "implementation. 
pass def test_role_creation_invalid_player (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(),", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "(self): association = self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no roles in a newly created", "writing, software # distributed under the License is distributed on an \"AS IS\"", "self.create_topic() player = self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role = association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected", "ModelConstraintException from tmapi_test_case import TMAPITestCase class AssociationTest (TMAPITestCase): def test_parent (self): parent =", "self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic maps to be created with no", "to be created with no ' + 'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(),", "newly created association') role_type = self.create_topic() player = self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role =", "License. 
# You may obtain a copy of the License at # #", "decrement for ' + 'topic map') def test_role_creation (self): association = self.create_association() self.assertEqual(0,", "parent.get_associations(), 'Association is not part of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association list", "association = self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no roles in a newly created association')", "compliance with the License. # You may obtain a copy of the License", "created with no ' + 'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected '", "to decrement for ' + 'topic map') def test_role_creation (self): association = self.create_association()", "\"\"\" from tmapi.exceptions import ModelConstraintException from tmapi_test_case import TMAPITestCase class AssociationTest (TMAPITestCase): def", "association.get_roles(unused_type).count()) def test_role_filter_illegal (self): # This test is not applicable in this implementation.", "from tmapi.exceptions import ModelConstraintException from tmapi_test_case import TMAPITestCase class AssociationTest (TMAPITestCase): def test_parent", "after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association list size to increment for ' +", "topic maps to be created with no ' + 'associations') association = parent.create_association(parent.create_topic())", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "parent after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association list size to increment for '", "TMAPITestCase class AssociationTest (TMAPITestCase): def test_parent (self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0,", "roles in a 
newly created association') role_type = self.create_topic() player = self.create_topic() self.assertEqual(0,", "self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove() self.assertEqual(0,", "role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self): association = self.create_association() type1 = self.create_topic() type2", "a newly created association') role_type = self.create_topic() player = self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role", "= association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(2,", "= self.create_topic() player = self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role = association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(),", "self.assertEqual(1, parent.get_associations().count(), 'Expected association list size to increment for ' + 'topic map')", "' + 'topic map') def test_role_creation (self): association = self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected", "# This test is not applicable in this implementation. pass def test_role_creation_invalid_player (self):", "not use this file except in compliance with the License. # You may", "# limitations under the License. \"\"\"Module containing tests for the Association model. Most", "This test is not applicable in this implementation. 
pass def test_role_creation_invalid_player (self): association", "License, Version 2.0 (the \"License\"); # you may not use this file except", "= self.create_association() type1 = self.create_topic() type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1,", "TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). \"\"\" from tmapi.exceptions import ModelConstraintException from tmapi_test_case import TMAPITestCase", "association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count())", "in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "+ 'association parent after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association list size to increment", "tmapi.exceptions import ModelConstraintException from tmapi_test_case import TMAPITestCase class AssociationTest (TMAPITestCase): def test_parent (self):", "= self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types())", "# you may not use this file except in compliance with the License.", "self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def test_role_creation_invalid_type (self): association = self.create_association() self.assertEqual(0,", "in 
association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2))", "governing permissions and # limitations under the License. \"\"\"Module containing tests for the", "tests are ported from the public domain tests that come with the TMAPI", "agreed to in writing, software # distributed under the License is distributed on", "association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in", "(the \"License\"); # you may not use this file except in compliance with", "self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in", "self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role = association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected role type') self.assertEqual(player,", "self.assertEqual(parent, association.get_parent(), 'Unexpected ' + 'association parent after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association", "# Unless required by applicable law or agreed to in writing, software #", "these tests are ported from the public domain tests that come with the", "by applicable law or agreed to in writing, software # distributed under the", "role.get_type(), 'Unexpected role type') self.assertEqual(player, role.get_player(), 'Unexpected role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role 
in", "self.create_association() type1 = self.create_topic() type2 = self.create_topic() unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "parent.get_associations().count(), 'Expected association list size to increment for ' + 'topic map') self.assertTrue(association", "self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in", "this implementation. pass def test_role_creation_invalid_player (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role,", "list size to increment for ' + 'topic map') self.assertTrue(association in parent.get_associations(), 'Association", "role type') self.assertEqual(player, role.get_player(), 'Unexpected role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def", "type2 = self.create_topic() unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1", "self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self): association = self.create_association() type1 = self.create_topic() type2 =", "association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected ' + 'association parent after creation') self.assertEqual(1,", "AssociationTest (TMAPITestCase): def test_parent (self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') 
self.assertEqual(parent.get_associations().count(), 0, 'Expected new", "file except in compliance with the License. # You may obtain a copy", "role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self): # This", "role = association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected role type') self.assertEqual(player, role.get_player(), 'Unexpected role", "map') self.assertTrue(association in parent.get_associations(), 'Association is not part of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(),", "License for the specific language governing permissions and # limitations under the License.", "list size to decrement for ' + 'topic map') def test_role_creation (self): association", "= self.create_association() type1 = self.create_topic() type2 = self.create_topic() unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count())", "to in writing, software # distributed under the License is distributed on an", "implied. # See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "size to increment for ' + 'topic map') self.assertTrue(association in parent.get_associations(), 'Association is", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "for the specific language governing permissions and # limitations under the License. \"\"\"Module", "not part of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association list size to decrement", "containing tests for the Association model. Most if not all of these tests", "or implied. 
# See the License for the specific language governing permissions and", "player) self.assertEqual(role_type, role.get_type(), 'Unexpected role type') self.assertEqual(player, role.get_player(), 'Unexpected role player') self.assertEqual(1, player.get_roles_played().count())", "player.get_roles_played()) def test_role_types (self): association = self.create_association() type1 = self.create_topic() type2 = self.create_topic()", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "self.assertEqual(player, role.get_player(), 'Unexpected role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def test_role_types (self):", "self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic maps to be created with no ' +", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "not applicable in this implementation. 
pass def test_role_creation_invalid_player (self): association = self.create_association() self.assertEqual(0,", "in writing, software # distributed under the License is distributed on an \"AS", "association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count())", "= self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role = association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected role type')", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "' + 'topic map') self.assertTrue(association in parent.get_associations(), 'Association is not part of get_associations()')", "self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3 = association.create_role(type2, self.create_topic())", "ported from the public domain tests that come with the TMAPI 2.0 distribution", "= parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected ' + 'association parent after creation') self.assertEqual(1, parent.get_associations().count(),", "(http://www.tmapi.org/2.0/). 
\"\"\" from tmapi.exceptions import ModelConstraintException from tmapi_test_case import TMAPITestCase class AssociationTest (TMAPITestCase):", "from tmapi_test_case import TMAPITestCase class AssociationTest (TMAPITestCase): def test_parent (self): parent = self.tms.create_topic_map(", "in parent.get_associations(), 'Association is not part of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "self.assertEqual(0, player.get_roles_played().count()) role = association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected role type') self.assertEqual(player, role.get_player(),", "you may not use this file except in compliance with the License. #", "def test_role_creation_invalid_player (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def", "= association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count())", "class AssociationTest (TMAPITestCase): def test_parent (self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected", "self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2 =", "association.get_role_types()) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in 
association.get_role_types()) self.assertTrue(type2 in association.get_role_types())", "use this file except in compliance with the License. # You may obtain", "association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove()", "self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count())", "self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in", "association list size to decrement for ' + 'topic map') def test_role_creation (self):", "test_role_creation_invalid_player (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def test_role_creation_invalid_type", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self): # This test is not applicable in", "(self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic maps to be", "in a newly created association') role_type = self.create_topic() player = self.create_topic() self.assertEqual(0, player.get_roles_played().count())", 
"association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2))", "creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association list size to increment for ' + 'topic", "permissions and # limitations under the License. \"\"\"Module containing tests for the Association", "2.0 (the \"License\"); # you may not use this file except in compliance", "+ 'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected ' + 'association parent after", "association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1,", "2.0 distribution (http://www.tmapi.org/2.0/). 
\"\"\" from tmapi.exceptions import ModelConstraintException from tmapi_test_case import TMAPITestCase class", "def test_role_filter (self): association = self.create_association() type1 = self.create_topic() type2 = self.create_topic() unused_type", "association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def", "association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected role type') self.assertEqual(player, role.get_player(), 'Unexpected role player') self.assertEqual(1,", "association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2 = association.create_role(type2,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "# Copyright 2011 <NAME> (<EMAIL>) # # Licensed under the Apache License, Version", "in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0,", "# # Unless required by applicable law or agreed to in writing, software", "self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1,", "distribution (http://www.tmapi.org/2.0/). \"\"\" from tmapi.exceptions import ModelConstraintException from tmapi_test_case import TMAPITestCase class AssociationTest", "express or implied. 
# See the License for the specific language governing permissions", "association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2", "association = self.create_association() type1 = self.create_topic() type2 = self.create_topic() unused_type = self.create_topic() self.assertEqual(0,", "association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count())", "self.assertEqual(0, parent.get_associations().count(), 'Expected association list size to decrement for ' + 'topic map')", "= association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2,", "tmapi_test_case import TMAPITestCase class AssociationTest (TMAPITestCase): def test_parent (self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent')", "either express or implied. # See the License for the specific language governing", "+ 'topic map') self.assertTrue(association in parent.get_associations(), 'Association is not part of get_associations()') association.remove()", "role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def test_role_types (self): association = self.create_association()", "the public domain tests that come with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). 
\"\"\"", "association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count())", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "player = self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role = association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected role", "self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1", "association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types())", "self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self): association = self.create_association() type1", "to increment for ' + 'topic map') self.assertTrue(association in parent.get_associations(), 'Association is not", "in association.get_role_types()) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in", "the License. 
# You may obtain a copy of the License at #", "in player.get_roles_played()) def test_role_types (self): association = self.create_association() type1 = self.create_topic() type2 =", "self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self): association", "that come with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). \"\"\" from tmapi.exceptions import ModelConstraintException", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "' + 'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected ' + 'association parent", "in association.get_role_types()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic maps to be created", "association.get_role_types()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types())", "test is not applicable in this implementation. 
pass def test_role_creation_invalid_player (self): association =", "'association parent after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association list size to increment for", "'Unexpected role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def test_role_types (self): association =", "parent.get_associations().count(), 'Expected association list size to decrement for ' + 'topic map') def", "'Unexpected role type') self.assertEqual(player, role.get_player(), 'Unexpected role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played())", "self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2))", "role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1", "unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic())", "Most if not all of these tests are ported from the public domain", "with no ' + 'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected ' +", "Copyright 2011 <NAME> (<EMAIL>) # # Licensed under the Apache License, Version 2.0", "role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) 
self.assertTrue(role2 in association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0,", "'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic maps to be created with no '", "self.assertEqual(role_type, role.get_type(), 'Unexpected role type') self.assertEqual(player, role.get_player(), 'Unexpected role player') self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role", "0, 'Expected new topic maps to be created with no ' + 'associations')", "association.get_roles(type2)) self.assertTrue(role3 in association.get_roles(type2)) self.assertEqual(0, association.get_roles(unused_type).count()) role3.remove() self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove()", "with the License. # You may obtain a copy of the License at", "association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self): association = self.create_association() type1 = self.create_topic()", "self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1", "in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self):", "in association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self): association =", "self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, 
association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count())", "type1 = self.create_topic() type2 = self.create_topic() unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count())", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "if not all of these tests are ported from the public domain tests", "'topic map') self.assertTrue(association in parent.get_associations(), 'Association is not part of get_associations()') association.remove() self.assertEqual(0,", "association') role_type = self.create_topic() player = self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role = association.create_role(role_type, player)", "in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in", "self.assertEqual(1, player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def test_role_types (self): association = self.create_association() type1 =", "self.assertEqual(0, association.get_roles().count(), 'Expected no roles in a newly created association') role_type = self.create_topic()", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association list size to decrement for '", "(self): association = self.create_association() type1 = self.create_topic() type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1", "self.assertTrue(role2 
in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal", "self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count())", "self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertFalse(type2 in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter", "= self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1,", "= association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected role type') self.assertEqual(player, role.get_player(), 'Unexpected role player')", "self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self): # This test", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "applicable in this implementation. 
pass def test_role_creation_invalid_player (self): association = self.create_association() self.assertEqual(0, association.get_roles().count())", "self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2 in", "role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2 = association.create_role(type2, self.create_topic())", "self.assertTrue(role in player.get_roles_played()) def test_role_types (self): association = self.create_association() type1 = self.create_topic() type2", "self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self): # This test is not applicable", "association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3", "the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). \"\"\" from tmapi.exceptions import ModelConstraintException from tmapi_test_case import", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3 =", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "import TMAPITestCase class AssociationTest (TMAPITestCase): def test_parent (self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(),", "= association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2,", "test_role_filter (self): association = self.create_association() type1 = self.create_topic() type2 = self.create_topic() unused_type =", "association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0,", "self.assertTrue(type2 in association.get_role_types()) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2", "association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types())", "def test_role_creation (self): association = self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no roles in a", "be created with no ' + 'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected", "2011 <NAME> (<EMAIL>) # # Licensed under the Apache License, Version 2.0 (the", "See the License for the specific language governing permissions 
and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "come with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). \"\"\" from tmapi.exceptions import ModelConstraintException from", "created association') role_type = self.create_topic() player = self.create_topic() self.assertEqual(0, player.get_roles_played().count()) role = association.create_role(role_type,", "test_role_types (self): association = self.create_association() type1 = self.create_topic() type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count())", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "the specific language governing permissions and # limitations under the License. \"\"\"Module containing", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "are ported from the public domain tests that come with the TMAPI 2.0", "self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic())", "association.get_role_types().count()) def test_role_filter (self): association = self.create_association() type1 = self.create_topic() type2 = self.create_topic()", "association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self): #", "Association model. 
Most if not all of these tests are ported from the", "self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2", "def test_role_filter_illegal (self): # This test is not applicable in this implementation. pass", "association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in", "for ' + 'topic map') def test_role_creation (self): association = self.create_association() self.assertEqual(0, association.get_roles().count(),", "self.create_association() type1 = self.create_topic() type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic())", "self.create_topic(), None) def test_role_creation_invalid_type (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, None,", "self.create_topic()) self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_roles(type2).count()) self.assertTrue(role2", "'Association is not part of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association list size", "is not applicable in this implementation. 
pass def test_role_creation_invalid_player (self): association = self.create_association()", "self.assertEqual(1, association.get_roles(type2).count()) self.assertTrue(role2 in association.get_roles(type2)) role2.remove() self.assertEqual(0, association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count())", "for ' + 'topic map') self.assertTrue(association in parent.get_associations(), 'Association is not part of", "association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) role2 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types())", "def test_role_types (self): association = self.create_association() type1 = self.create_topic() type2 = self.create_topic() self.assertEqual(0,", "= self.create_topic() unused_type = self.create_topic() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role1 =", "self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1", "language governing permissions and # limitations under the License. \"\"\"Module containing tests for", "association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association list size to decrement for ' + 'topic", "in this implementation. 
pass def test_role_creation_invalid_player (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException,", "type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in", "in association.get_role_types()) role1.remove() self.assertEqual(0, association.get_role_types().count()) def test_role_filter (self): association = self.create_association() type1 =", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. # You may obtain a copy of", "self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0, association.get_roles(unused_type).count()) role2 = association.create_role(type2,", "(<EMAIL>) # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "association.get_parent(), 'Unexpected ' + 'association parent after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association list", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "test_parent (self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic maps to", "'Expected association list size to increment for ' + 'topic map') self.assertTrue(association in", "None) def test_role_creation_invalid_type (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, None, self.create_topic())", "association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def test_role_creation_invalid_type (self): association = self.create_association() self.assertEqual(0, association.get_roles().count())", "association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove() self.assertEqual(1, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types())", "type1 = self.create_topic() type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1,", "player.get_roles_played().count()) role = association.create_role(role_type, player) self.assertEqual(role_type, role.get_type(), 'Unexpected role type') self.assertEqual(player, role.get_player(), 'Unexpected", "self.assertTrue(type2 in association.get_role_types()) role3.remove() self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role2.remove()", "not all of these tests are ported from the public domain tests that", "(self): association = self.create_association() self.assertEqual(0, 
association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def test_role_creation_invalid_type (self):", "limitations under the License. \"\"\"Module containing tests for the Association model. Most if", "role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self): # This test is not", "domain tests that come with the TMAPI 2.0 distribution (http://www.tmapi.org/2.0/). \"\"\" from tmapi.exceptions", "'Expected new topic maps to be created with no ' + 'associations') association", "self.create_association() self.assertEqual(0, association.get_roles().count(), 'Expected no roles in a newly created association') role_type =", "(TMAPITestCase): def test_parent (self): parent = self.tms.create_topic_map( 'http://www.tmapi.org/test/assoc/parent') self.assertEqual(parent.get_associations().count(), 0, 'Expected new topic", "role2 = association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3", "player.get_roles_played().count()) self.assertTrue(role in player.get_roles_played()) def test_role_types (self): association = self.create_association() type1 = self.create_topic()", "= association.create_role(type2, self.create_topic()) self.assertEqual(2, association.get_role_types().count()) self.assertTrue(type1 in association.get_role_types()) self.assertTrue(type2 in association.get_role_types()) role3 =", "association.get_roles(type2).count()) role1.remove() self.assertEqual(0, association.get_roles(type1).count()) self.assertEqual(0, association.get_roles(unused_type).count()) def test_role_filter_illegal (self): # This test is", "association list size to increment for ' + 'topic map') self.assertTrue(association in 
parent.get_associations(),", "association.create_role, self.create_topic(), None) def test_role_creation_invalid_type (self): association = self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role,", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected ' + 'association parent after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected", "no ' + 'associations') association = parent.create_association(parent.create_topic()) self.assertEqual(parent, association.get_parent(), 'Unexpected ' + 'association", "= self.create_association() self.assertEqual(0, association.get_roles().count()) self.assertRaises(ModelConstraintException, association.create_role, self.create_topic(), None) def test_role_creation_invalid_type (self): association =", "the Association model. Most if not all of these tests are ported from", "association.get_roles(unused_type).count()) role1 = association.create_role(type1, self.create_topic()) self.assertEqual(1, association.get_roles(type1).count()) self.assertTrue(role1 in association.get_roles(type1)) self.assertEqual(0, association.get_roles(type2).count()) self.assertEqual(0,", "increment for ' + 'topic map') self.assertTrue(association in parent.get_associations(), 'Association is not part", "part of get_associations()') association.remove() self.assertEqual(0, parent.get_associations().count(), 'Expected association list size to decrement for", "import ModelConstraintException from tmapi_test_case import TMAPITestCase class AssociationTest (TMAPITestCase): def test_parent (self): parent", "test_role_filter_illegal (self): # This test is not applicable in this implementation. 
pass def", "'Unexpected ' + 'association parent after creation') self.assertEqual(1, parent.get_associations().count(), 'Expected association list size", "association = self.create_association() type1 = self.create_topic() type2 = self.create_topic() self.assertEqual(0, association.get_role_types().count()) role1 =" ]
[ "# copied, modified, or distributed except according to those terms. import os __all__", "except according to those terms. import os __all__ = [ 'fig2files' ] def", "2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or # http://opensource.org/licenses/MIT>,", "import os __all__ = [ 'fig2files' ] def fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname),", "] def fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True)", "dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True) plt.savefig('{0}/eps/{1}.eps'.format(dirname, filename), format='eps') os.makedirs('{0}/svg'.format(dirname), exist_ok=True)", "Version 2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or #", "[ 'fig2files' ] def fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi)", "http://opensource.org/licenses/MIT>, at your option. This file may not be # copied, modified, or", "or the MIT license <LICENSE-MIT or # http://opensource.org/licenses/MIT>, at your option. This file", "or # http://opensource.org/licenses/MIT>, at your option. 
This file may not be # copied,", "'fig2files' ] def fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname),", "This file may not be # copied, modified, or distributed except according to", "may not be # copied, modified, or distributed except according to those terms.", "copied, modified, or distributed except according to those terms. import os __all__ =", "Copyright 2021 portfolio-robustfpm-framework Authors # Licensed under the Apache License, Version 2.0, <LICENSE-APACHE", "license <LICENSE-MIT or # http://opensource.org/licenses/MIT>, at your option. This file may not be", "file may not be # copied, modified, or distributed except according to those", "terms. import os __all__ = [ 'fig2files' ] def fig2files(plt, dirname, filename, dpi=None):", "the Apache License, Version 2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or the MIT license", "according to those terms. import os __all__ = [ 'fig2files' ] def fig2files(plt,", "plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True) plt.savefig('{0}/eps/{1}.eps'.format(dirname, filename), format='eps') os.makedirs('{0}/svg'.format(dirname), exist_ok=True) plt.savefig('{0}/svg/{1}.svg'.format(dirname, filename), format='svg')", "MIT license <LICENSE-MIT or # http://opensource.org/licenses/MIT>, at your option. This file may not", "__all__ = [ 'fig2files' ] def fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname,", "Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or the", "or distributed except according to those terms. import os __all__ = [ 'fig2files'", "distributed except according to those terms. 
import os __all__ = [ 'fig2files' ]", "portfolio-robustfpm-framework Authors # Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or #", "to those terms. import os __all__ = [ 'fig2files' ] def fig2files(plt, dirname,", "under the Apache License, Version 2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or the MIT", "those terms. import os __all__ = [ 'fig2files' ] def fig2files(plt, dirname, filename,", "Apache License, Version 2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT", "<reponame>andreevnick/robust-financial-portfolio-management-framework<filename>robustfpm/util/io.py<gh_stars>1-10 # Copyright 2021 portfolio-robustfpm-framework Authors # Licensed under the Apache License, Version", "# Copyright 2021 portfolio-robustfpm-framework Authors # Licensed under the Apache License, Version 2.0,", "<LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or # http://opensource.org/licenses/MIT>, at", "# Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or", "= [ 'fig2files' ] def fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename),", "def fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True) plt.savefig('{0}/eps/{1}.eps'.format(dirname,", "exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True) plt.savefig('{0}/eps/{1}.eps'.format(dirname, filename), format='eps') os.makedirs('{0}/svg'.format(dirname), exist_ok=True) plt.savefig('{0}/svg/{1}.svg'.format(dirname, filename),", "http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or # 
http://opensource.org/licenses/MIT>, at your option. This", "filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True) plt.savefig('{0}/eps/{1}.eps'.format(dirname, filename), format='eps') os.makedirs('{0}/svg'.format(dirname),", "2021 portfolio-robustfpm-framework Authors # Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or", "or # http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or # http://opensource.org/licenses/MIT>, at your", "at your option. This file may not be # copied, modified, or distributed", "os __all__ = [ 'fig2files' ] def fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True)", "<LICENSE-MIT or # http://opensource.org/licenses/MIT>, at your option. This file may not be #", "License, Version 2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or", "not be # copied, modified, or distributed except according to those terms. 
import", "dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True) plt.savefig('{0}/eps/{1}.eps'.format(dirname, filename), format='eps')", "# http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or # http://opensource.org/licenses/MIT>, at your option.", "os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True) plt.savefig('{0}/eps/{1}.eps'.format(dirname, filename), format='eps') os.makedirs('{0}/svg'.format(dirname), exist_ok=True) plt.savefig('{0}/svg/{1}.svg'.format(dirname,", "fig2files(plt, dirname, filename, dpi=None): os.makedirs('{0}/png'.format(dirname), exist_ok=True) plt.savefig('{0}/png/{1}.png'.format(dirname, filename), dpi=dpi) os.makedirs('{0}/eps'.format(dirname), exist_ok=True) plt.savefig('{0}/eps/{1}.eps'.format(dirname, filename),", "your option. This file may not be # copied, modified, or distributed except", "be # copied, modified, or distributed except according to those terms. import os", "# http://opensource.org/licenses/MIT>, at your option. This file may not be # copied, modified,", "option. This file may not be # copied, modified, or distributed except according", "modified, or distributed except according to those terms. import os __all__ = [", "Authors # Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or # http://apache.org/licenses/LICENSE-2.0>", "the MIT license <LICENSE-MIT or # http://opensource.org/licenses/MIT>, at your option. This file may" ]
[ "default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would be deprecated", "default_args=None): warnings.warn( '``build_anchor_generator`` would be deprecated soon, please use ' '``build_prior_generator`` ') return", "def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would be deprecated soon, please use ' '``build_prior_generator``", "PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator``", "for anchors and points') ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS,", "rights reserved. import warnings from mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator for", "'``build_anchor_generator`` would be deprecated soon, please use ' '``build_prior_generator`` ') return build_prior_generator(cfg, default_args=default_args)", "default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would be deprecated soon, please use '", "# Copyright (c) OpenMMLab. All rights reserved. 
import warnings from mmcv.utils import Registry,", "Registry('Generator for anchors and points') ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg,", "Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator for anchors and points') ANCHOR_GENERATORS = PRIOR_GENERATORS def", "def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would", "warnings.warn( '``build_anchor_generator`` would be deprecated soon, please use ' '``build_prior_generator`` ') return build_prior_generator(cfg,", "reserved. import warnings from mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator for anchors", "and points') ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def", "mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator for anchors and points') ANCHOR_GENERATORS =", "build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would be deprecated soon, please", "from mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator for anchors and points') ANCHOR_GENERATORS", "(c) OpenMMLab. All rights reserved. 
import warnings from mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS", "build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would be", "import warnings from mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator for anchors and", "build_from_cfg PRIOR_GENERATORS = Registry('Generator for anchors and points') ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg,", "ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None):", "= Registry('Generator for anchors and points') ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return", "Copyright (c) OpenMMLab. All rights reserved. import warnings from mmcv.utils import Registry, build_from_cfg", "OpenMMLab. All rights reserved. import warnings from mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS =", "PRIOR_GENERATORS = Registry('Generator for anchors and points') ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None):", "import Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator for anchors and points') ANCHOR_GENERATORS = PRIOR_GENERATORS", "return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would be deprecated soon,", "All rights reserved. 
import warnings from mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator", "warnings from mmcv.utils import Registry, build_from_cfg PRIOR_GENERATORS = Registry('Generator for anchors and points')", "build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would be deprecated soon, please use ' '``build_prior_generator`` ')", "anchors and points') ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args)", "PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn( '``build_anchor_generator`` would be deprecated soon, please use", "points') ANCHOR_GENERATORS = PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg,", "= PRIOR_GENERATORS def build_prior_generator(cfg, default_args=None): return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) def build_anchor_generator(cfg, default_args=None): warnings.warn(" ]
[ "''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob) if len(bits_list[-1]) == 2:", "== '-e': encode(argv[2], argv[3]) elif argv[1] == '--decode' or argv[1] == '-d': decode(argv[2],", "if char != '=') bits_list = re.findall(\".{1,8}\", blob) if len(bits_list[-1]) != 8: del", "bits_list)) raw_file.close() encrypted_file.close() if __name__ == '__main__': if len(argv) <= 3: print(\"Not enough", "in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path, 'r') raw_file", "encode(raw_file_path, encrypted_file_path): raw_file = open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}'", "{argv[1]}.\", file=stderr) exit(2) except EnvironmentError: print(f\"Problem with access {argv[2]} or {argv[3]} occurred.\" f\"Does", "= ''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob) if len(bits_list[-1]) ==", "'--encode' or argv[1] == '-e': encode(argv[2], argv[3]) elif argv[1] == '--decode' or argv[1]", "'00' end = '=' else: end = '' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits in", "+= '0000' end = '==' elif len(bits_list[-1]) == 4: bits_list[-1] += '00' end", "'-e': encode(argv[2], argv[3]) elif argv[1] == '--decode' or argv[1] == '-d': decode(argv[2], argv[3])", "encrypted_file.close() if __name__ == '__main__': if len(argv) <= 3: print(\"Not enough parameters.\", file=stderr)", "raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path, 'r') raw_file = open(raw_file_path, 'w')", "= '=' else: end = '' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits in bits_list)) encrypted_file.write(end)", "'=' else: end = '' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits in bits_list)) 
encrypted_file.write(end) raw_file.close()", "open(encrypted_file_path, 'r') raw_file = open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read()", "else: end = '' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close()", "file=stderr) exit(2) except EnvironmentError: print(f\"Problem with access {argv[2]} or {argv[3]} occurred.\" f\"Does {argv[2]}", "or argv[1] == '-d': decode(argv[2], argv[3]) else: print(f\"Unknown parameter {argv[1]}.\", file=stderr) exit(2) except", "in bits_list)) raw_file.close() encrypted_file.close() if __name__ == '__main__': if len(argv) <= 3: print(\"Not", "'' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path):", "char in encrypted_file.read() if char != '=') bits_list = re.findall(\".{1,8}\", blob) if len(bits_list[-1])", "enough parameters.\", file=stderr) exit(1) try: if argv[1] == '--encode' or argv[1] == '-e':", "== '__main__': if len(argv) <= 3: print(\"Not enough parameters.\", file=stderr) exit(1) try: if", "blob) if len(bits_list[-1]) == 2: bits_list[-1] += '0000' end = '==' elif len(bits_list[-1])", "encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file", "for char in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob) if len(bits_list[-1]) == 2: bits_list[-1]", "2)) for bits in bits_list)) raw_file.close() encrypted_file.close() if __name__ == '__main__': if len(argv)", "import re _base64_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file = open(raw_file_path, 'r') encrypted_file", "2: bits_list[-1] += 
'0000' end = '==' elif len(bits_list[-1]) == 4: bits_list[-1] +=", "len(bits_list[-1]) == 4: bits_list[-1] += '00' end = '=' else: end = ''", "char in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob) if len(bits_list[-1]) == 2: bits_list[-1] +=", "decode(argv[2], argv[3]) else: print(f\"Unknown parameter {argv[1]}.\", file=stderr) exit(2) except EnvironmentError: print(f\"Problem with access", "except EnvironmentError: print(f\"Problem with access {argv[2]} or {argv[3]} occurred.\" f\"Does {argv[2]} exist?\", file=stderr)", "EnvironmentError: print(f\"Problem with access {argv[2]} or {argv[3]} occurred.\" f\"Does {argv[2]} exist?\", file=stderr) exit(3)", "'0000' end = '==' elif len(bits_list[-1]) == 4: bits_list[-1] += '00' end =", "from sys import argv, stderr import re _base64_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path):", "encode(argv[2], argv[3]) elif argv[1] == '--decode' or argv[1] == '-d': decode(argv[2], argv[3]) else:", "'w') blob = ''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob) if", "bits_list[-1] += '0000' end = '==' elif len(bits_list[-1]) == 4: bits_list[-1] += '00'", "8: del bits_list[-1] raw_file.write(''.join(chr(int(bits, 2)) for bits in bits_list)) raw_file.close() encrypted_file.close() if __name__", "!= 8: del bits_list[-1] raw_file.write(''.join(chr(int(bits, 2)) for bits in bits_list)) raw_file.close() encrypted_file.close() if", "'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file = open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path, 'w') blob", "bits_list = re.findall(\".{1,6}\", blob) if len(bits_list[-1]) == 2: bits_list[-1] += '0000' end =", "re _base64_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file = 
open(raw_file_path, 'r') encrypted_file =", "'w') blob = ''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read() if char != '=') bits_list", "sys import argv, stderr import re _base64_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file", "for char in encrypted_file.read() if char != '=') bits_list = re.findall(\".{1,8}\", blob) if", "raw_file = open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read() if char", "in encrypted_file.read() if char != '=') bits_list = re.findall(\".{1,8}\", blob) if len(bits_list[-1]) !=", "= open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read() if char !=", "bits_list[-1] raw_file.write(''.join(chr(int(bits, 2)) for bits in bits_list)) raw_file.close() encrypted_file.close() if __name__ == '__main__':", "__name__ == '__main__': if len(argv) <= 3: print(\"Not enough parameters.\", file=stderr) exit(1) try:", "!= '=') bits_list = re.findall(\".{1,8}\", blob) if len(bits_list[-1]) != 8: del bits_list[-1] raw_file.write(''.join(chr(int(bits,", "if len(argv) <= 3: print(\"Not enough parameters.\", file=stderr) exit(1) try: if argv[1] ==", "argv[1] == '--decode' or argv[1] == '-d': decode(argv[2], argv[3]) else: print(f\"Unknown parameter {argv[1]}.\",", "raw_file = open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}' for char", "= open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}' for char in", "re.findall(\".{1,8}\", blob) if len(bits_list[-1]) != 8: del bits_list[-1] raw_file.write(''.join(chr(int(bits, 2)) for bits in", "argv[1] == '--encode' or argv[1] == '-e': encode(argv[2], argv[3]) elif argv[1] == '--decode'", "argv[1] == '-e': encode(argv[2], argv[3]) elif argv[1] == '--decode' or argv[1] == '-d':", "del bits_list[-1] 
raw_file.write(''.join(chr(int(bits, 2)) for bits in bits_list)) raw_file.close() encrypted_file.close() if __name__ ==", "bits in bits_list)) raw_file.close() encrypted_file.close() if __name__ == '__main__': if len(argv) <= 3:", "open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob)", "'__main__': if len(argv) <= 3: print(\"Not enough parameters.\", file=stderr) exit(1) try: if argv[1]", "'-d': decode(argv[2], argv[3]) else: print(f\"Unknown parameter {argv[1]}.\", file=stderr) exit(2) except EnvironmentError: print(f\"Problem with", "try: if argv[1] == '--encode' or argv[1] == '-e': encode(argv[2], argv[3]) elif argv[1]", "== '--encode' or argv[1] == '-e': encode(argv[2], argv[3]) elif argv[1] == '--decode' or", "= ''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read() if char != '=') bits_list = re.findall(\".{1,8}\",", "in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob) if len(bits_list[-1]) == 2: bits_list[-1] += '0000'", "''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read() if char != '=') bits_list = re.findall(\".{1,8}\", blob)", "_base64_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file = open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path,", "file=stderr) exit(1) try: if argv[1] == '--encode' or argv[1] == '-e': encode(argv[2], argv[3])", "argv[3]) else: print(f\"Unknown parameter {argv[1]}.\", file=stderr) exit(2) except EnvironmentError: print(f\"Problem with access {argv[2]}", "def encode(raw_file_path, encrypted_file_path): raw_file = open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path, 'w') blob =", "elif len(bits_list[-1]) == 4: bits_list[-1] += '00' end = '=' else: end =", "elif argv[1] == '--decode' or argv[1] == '-d': decode(argv[2], argv[3]) else: print(f\"Unknown parameter", 
"bits in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path, 'r')", "def decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path, 'r') raw_file = open(raw_file_path, 'w') blob =", "'r') encrypted_file = open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8')) bits_list", "stderr import re _base64_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file = open(raw_file_path, 'r')", "print(\"Not enough parameters.\", file=stderr) exit(1) try: if argv[1] == '--encode' or argv[1] ==", "end = '==' elif len(bits_list[-1]) == 4: bits_list[-1] += '00' end = '='", "== 4: bits_list[-1] += '00' end = '=' else: end = '' encrypted_file.write(''.join(_base64_str[int(bits,", "exit(1) try: if argv[1] == '--encode' or argv[1] == '-e': encode(argv[2], argv[3]) elif", "bits_list[-1] += '00' end = '=' else: end = '' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for", "import argv, stderr import re _base64_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file =", "encrypted_file_path): raw_file = open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}' for", "== '--decode' or argv[1] == '-d': decode(argv[2], argv[3]) else: print(f\"Unknown parameter {argv[1]}.\", file=stderr)", "raw_file.close() encrypted_file.close() if __name__ == '__main__': if len(argv) <= 3: print(\"Not enough parameters.\",", "blob = ''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read() if char != '=') bits_list =", "'--decode' or argv[1] == '-d': decode(argv[2], argv[3]) else: print(f\"Unknown parameter {argv[1]}.\", file=stderr) exit(2)", "blob) if len(bits_list[-1]) != 8: del bits_list[-1] 
raw_file.write(''.join(chr(int(bits, 2)) for bits in bits_list))", "== 2: bits_list[-1] += '0000' end = '==' elif len(bits_list[-1]) == 4: bits_list[-1]", "= re.findall(\".{1,6}\", blob) if len(bits_list[-1]) == 2: bits_list[-1] += '0000' end = '=='", "= open(encrypted_file_path, 'r') raw_file = open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}' for char in", "len(bits_list[-1]) == 2: bits_list[-1] += '0000' end = '==' elif len(bits_list[-1]) == 4:", "if len(bits_list[-1]) != 8: del bits_list[-1] raw_file.write(''.join(chr(int(bits, 2)) for bits in bits_list)) raw_file.close()", "encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path, 'r') raw_file = open(raw_file_path,", "= re.findall(\".{1,8}\", blob) if len(bits_list[-1]) != 8: del bits_list[-1] raw_file.write(''.join(chr(int(bits, 2)) for bits", "4: bits_list[-1] += '00' end = '=' else: end = '' encrypted_file.write(''.join(_base64_str[int(bits, 2)]", "decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path, 'r') raw_file = open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}'", "encrypted_file = open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8')) bits_list =", "'r') raw_file = open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read() if", "== '-d': decode(argv[2], argv[3]) else: print(f\"Unknown parameter {argv[1]}.\", file=stderr) exit(2) except EnvironmentError: print(f\"Problem", "re.findall(\".{1,6}\", blob) if len(bits_list[-1]) == 2: bits_list[-1] += '0000' end = '==' elif", "bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path, 'r') raw_file =", "end = '=' else: end = '' encrypted_file.write(''.join(_base64_str[int(bits, 
2)] for bits in bits_list))", "len(bits_list[-1]) != 8: del bits_list[-1] raw_file.write(''.join(chr(int(bits, 2)) for bits in bits_list)) raw_file.close() encrypted_file.close()", "if __name__ == '__main__': if len(argv) <= 3: print(\"Not enough parameters.\", file=stderr) exit(1)", "parameter {argv[1]}.\", file=stderr) exit(2) except EnvironmentError: print(f\"Problem with access {argv[2]} or {argv[3]} occurred.\"", "2)] for bits in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file =", "parameters.\", file=stderr) exit(1) try: if argv[1] == '--encode' or argv[1] == '-e': encode(argv[2],", "3: print(\"Not enough parameters.\", file=stderr) exit(1) try: if argv[1] == '--encode' or argv[1]", "raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob) if len(bits_list[-1]) == 2: bits_list[-1] += '0000' end", "print(f\"Unknown parameter {argv[1]}.\", file=stderr) exit(2) except EnvironmentError: print(f\"Problem with access {argv[2]} or {argv[3]}", "argv[1] == '-d': decode(argv[2], argv[3]) else: print(f\"Unknown parameter {argv[1]}.\", file=stderr) exit(2) except EnvironmentError:", "= 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file = open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path, 'w')", "+= '00' end = '=' else: end = '' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits", "end = '' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def", "encrypted_file.read() if char != '=') bits_list = re.findall(\".{1,8}\", blob) if len(bits_list[-1]) != 8:", "blob = ''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\", blob) if len(bits_list[-1])", "argv, stderr import re _base64_str = 
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path, encrypted_file_path): raw_file = open(raw_file_path,", "'==' elif len(bits_list[-1]) == 4: bits_list[-1] += '00' end = '=' else: end", "bits_list = re.findall(\".{1,8}\", blob) if len(bits_list[-1]) != 8: del bits_list[-1] raw_file.write(''.join(chr(int(bits, 2)) for", "or argv[1] == '-e': encode(argv[2], argv[3]) elif argv[1] == '--decode' or argv[1] ==", "if argv[1] == '--encode' or argv[1] == '-e': encode(argv[2], argv[3]) elif argv[1] ==", "encrypted_file = open(encrypted_file_path, 'r') raw_file = open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}' for char", "encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path, 'r') raw_file = open(raw_file_path, 'w') blob", "open(raw_file_path, 'r') encrypted_file = open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8'))", "<filename>list2/task2.py from sys import argv, stderr import re _base64_str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' def encode(raw_file_path,", "open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}' for char in encrypted_file.read() if char != '=')", "raw_file.write(''.join(chr(int(bits, 2)) for bits in bits_list)) raw_file.close() encrypted_file.close() if __name__ == '__main__': if", "= '==' elif len(bits_list[-1]) == 4: bits_list[-1] += '00' end = '=' else:", "= '' encrypted_file.write(''.join(_base64_str[int(bits, 2)] for bits in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path,", "argv[3]) elif argv[1] == '--decode' or argv[1] == '-d': decode(argv[2], argv[3]) else: print(f\"Unknown", "'=') bits_list = re.findall(\".{1,8}\", blob) if len(bits_list[-1]) != 8: del bits_list[-1] raw_file.write(''.join(chr(int(bits, 2))", "else: print(f\"Unknown parameter {argv[1]}.\", 
file=stderr) exit(2) except EnvironmentError: print(f\"Problem with access {argv[2]} or", "exit(2) except EnvironmentError: print(f\"Problem with access {argv[2]} or {argv[3]} occurred.\" f\"Does {argv[2]} exist?\",", "for bits in bits_list)) encrypted_file.write(end) raw_file.close() encrypted_file.close() def decode(encrypted_file_path, raw_file_path): encrypted_file = open(encrypted_file_path,", "len(argv) <= 3: print(\"Not enough parameters.\", file=stderr) exit(1) try: if argv[1] == '--encode'", "if len(bits_list[-1]) == 2: bits_list[-1] += '0000' end = '==' elif len(bits_list[-1]) ==", "<= 3: print(\"Not enough parameters.\", file=stderr) exit(1) try: if argv[1] == '--encode' or", "= open(encrypted_file_path, 'w') blob = ''.join(f'{char:08b}' for char in raw_file.read().encode('utf-8')) bits_list = re.findall(\".{1,6}\",", "raw_file_path): encrypted_file = open(encrypted_file_path, 'r') raw_file = open(raw_file_path, 'w') blob = ''.join(f'{_base64_str.index(char):06b}' for", "for bits in bits_list)) raw_file.close() encrypted_file.close() if __name__ == '__main__': if len(argv) <=", "char != '=') bits_list = re.findall(\".{1,8}\", blob) if len(bits_list[-1]) != 8: del bits_list[-1]" ]
[ "PythonMagick # note that nodes always point to same entries # unless we", "= (80, 100, 0) point7 = (90, 100, 0) point8 = (110, 100,", "self.root_entry def setRootEntry(self, root_entry): self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node", "is in order of low-level to high-level; # wish to insert using order", "RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if", "term2 surface_area += term margin = surface_area return margin def toString(self): upper_left =", "[entry])) return self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self, node, E_overall, entry): # prev_leaf_status =", "only child pass def condenseTree(self, leaf_node): Q = [] self.condenseTreeHelper(leaf_node, Q) # Q", "point5 = (70, 100, 0) point6 = (80, 100, 0) point7 = (90,", "if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry in entry_group2: next_curr_node = curr_entry.getChild()", "candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self, entries, entry): mbr = entry.getMBR() tagged_mbr_list = []", "node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() == None and self.getNumChildren() == 0) or", "self.toEntriesArePresentStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") +", "root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component = 0 if root_mbr_is_contained == True", "= self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) != 1: candidate_entries = 
self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else:", "time on average for start rectangle # taken from set of actual rectangles", "offset, multiplier * y1 + offset) next_x2, next_y2 = (multiplier * x2 +", "in entries: child = entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str", "item = None if len(heap) != 0: (priority,item) = heapq.heappop(heap) elif len(internal_node_stack_deque) !=", "start_rectangle_entries = [x.getEntry() for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry in", "in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result = self.findLeafHelper(entry, next_entry) if result", "xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i]", "for x in combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value] next_next_candidates = [x[1] for x", "# needed this curr_node.setParent(node) \"\"\" entries = node.getEntries() mbr_list = [x.getMBR() for x", "1402, 1548)), \\ ((433, 499, 483), (1300, 1330, 1055))] \"\"\" # n =", "RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6, (110, 200, 100), point6) node6 =", "== True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x: x[0], reverse = True) for", "True) entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5, (110, 200, 100),", "entry in entries[0 : 15]: for entry in entries: # if len(tree.getNodes()) !=", "child_str = self.toEntriesArePresentStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \"", "HyperRectangle((50, 50, 0), (100, 100, 0), 1)) tree = RTree() print tree.toString() curr_root", 
"CompositeMBR(MBR): def __init__(self, upper_left, lower_right, mbr_list): MBR.__init__(self, upper_left, lower_right) self.mbr_list = mbr_list def", "# print \"tree, currently:\", tree.toString() # tree2.delete(entry) pass # print tree.toString() result =", "root_node = root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area", "xrange(self.getDimension()): comp1 = upper_left[i] comp2 = lower_right[i] side = comp2 - comp1 sides.append(side)", "order of high-level to low-level # Q = list(set(Q)) Q.reverse() for curr_node in", "for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True: parent.removeEntry(entry)", "curr_entry = curr_node.getEntry() # print \"mbr:\", curr_entry.getMBR().toString() # print \"tree:\", self.toString() self.insert(curr_entry) def", "+ \")\" return overall_str def toString(self): root = self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self,", "= [x.getMBR() for x in entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area", "denominator)) # for n = 14500 # x1 = int(100 + random.randint(0, k)", "# n = 1000 # n = 20000 n = 1000 import math", "mbr, entry, partial_result): if entry.getMBR().isRaw() == True: # print mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR())", "1000 import math for i in xrange(n): upper_left = None lower_right = None", "on 2016-11-16 to fix margin calculation # note that we assume rectangles are", "200, 100), point3) node3 = RTreeNode(None, [], True) entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3)", "= entry.getChild() entries = node.getEntries() mbr_list = [entry.getMBR()] for mbr in mbr_list: upper_left", "string.join(overall_str_list, \" \") + \")\" 
return overall_str def toString(self): root = self.getRootEntry().getChild() return", "temporarily look like leaf nodes # keep_nodes = [x for x in self.getNodesForNode(node)", "1075, 1542)), \\ ((358, 815, 372), (761, 1089, 594)), \\ ((294, 238, 1036),", "result[i] S_comp_value = 0 low_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for", "for x in mbr_list]) for curr_mbr in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None)", "== 1: upper_left = (20, 20) lower_right = (40, 40) elif i %", "RawMBR(MBR): def __init__(self, upper_left, lower_right, contained_item): MBR.__init__(self, upper_left, lower_right) self.contained_item = contained_item def", "True # assume that rectangles never have negative area for i in xrange(mbr_a.getDimension()):", "actual rectangle # decide whether to include associated entry in result; # if", "# have supernode demotion when size decreases to or below M # updated", "= parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for", "def doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry):", "import string class RTree: def __init__(self): root_node = RTreeNode(None, [], True) root_mbr =", "x[0]], [y.getMBR() for y in x[1]]), x) for x in candidate_distributions] mbr_pair_tagged_candidate_distributions =", "6,000 works in 56.672 sec. 
for pypy with m = 8 and M", "#1\" return (RTree.SUPERNODE, [node]) elif split_status == RTree.SUPERNODE: pass # print \"no split\"", "1000 # k = int(round((5500 * math.log(5500, 2)) ** (1 / 3.0) /", "return is_equal class RawMBR(MBR): def __init__(self, upper_left, lower_right, contained_item): MBR.__init__(self, upper_left, lower_right) self.contained_item", "rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node,", "= entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list = [x.getMBR() for x", "return True else: entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) ==", "lower_right[i] term1 = comp_1b - comp_1a for j in xrange(i + 1, self.getDimension()):", "delete\") # if parent has zero entries after removing this entry, this should", "in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] for keep_node in keep_nodes: Q.append(keep_node) # only", "center_x = next_x1 center_y = next_y1 radius = 4 perimeter_x = next_x1 perimeter_y", "** 2 + change_y ** 2) return distance class RTreeNode: def __init__(self, parent,", "= [x[0] for x in tagged_area_values] min_area = min(area_values) candidate_tagged_area_values = [x for", "rectangles # for an r-tree and O(n * log(n)) time at worst; #", "parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry], False, False) else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result", "(80, 100, 0) point7 = (90, 100, 0) point8 = (110, 100, 0)", "ee], True, False) else: return (False, []) \"\"\" # assume item is in", "if without_borders == True: do_overlap = do_overlap and comp_a1 < comp_b2 and comp_a2", "(1, 
10, 10), (3, 10, 10)] # for i in xrange(10): # for", "continue if entry == ignore_entry: # ignore node if its entry matches the", "could have a safe path to a leaf where the leaf mbr #", "of entry child if curr_entry.getMBR().isRaw() == True: if entry == curr_entry: return True", "decide whether to include associated entry in result; # if we made it", "y1 distance = math.sqrt(change_x ** 2 + change_y ** 2) return distance class", "depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if node == None: return elif", "parent, entries, is_leaf, entry = None, split_history_root_dimension = None, is_supernode = False): self.parent", "= entry.getChild().getEntries() children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for", "= priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\", conflict_x_tree.toString() # for", "for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString() for", "items def getSize(self): return len(self.heap) import math def getDistance(point1, point2): x1, y1 =", "xtreeInsert(); # have supernode demotion when size decreases to or below M #", "(RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() for x in added_nodes]) # print \"supernode", "for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False: if", "== upper_left2 and lower_right1 == lower_right2 return is_equal class RawMBR(MBR): def __init__(self, upper_left,", "0) curr_mbr1 = RawMBR((100, 100, 0), (100, 100, 0), (100, 100, 0)) curr_mbr2", "for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if", "(8, 10, 
10), (6, 10, 10), (9, 10, 10), (6, 10, 10), (9,", "# x1 = int(100 + random.randint(0, k) * 100) # y1 = int(100", "def chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node): if node.isLeafNode() ==", "@staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area =", "return False def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getArea(self): upper_left", "depth % 3 color = None if color_choice == 0: color = PythonMagick.Color(65535,", "curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr) == False: continue #", "(priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item) = heapq.heappop(self.heap) return item def isEmpty(self): return len(self.heap)", "self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self,", "(1 / 3.0) / denominator)) # for n = 10000 # k =", "= lambda x: x[0], reverse = True) for priority_tagged_internal_entry in priority_tagged_internal_entries: priority, internal_entry", "= id_value def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getIDValue(self): return", "leaf_node parent = node.getParent() if parent != None: curr_entries = node.getEntries() entry =", "Exception() \"\"\" # print tree.toString() # for entry in entries[0 : 4]: #", "int(100 + random.randint(0, k) * 100) # y1 = int(100 + random.randint(0, k)", "\"-\" if node.isLeafNode() == False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list =", "= RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3, (110, 200, 100), point3) node3", "# we never 
split a super-node if node.isSuperNode() == True: # raise Exception()", "without_borders = False): upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b", "time operation # to find the entry containing node; just look at parent", "self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result == True: return True return False # returns entries", "partner_entry = second_entry if have_resulting_second_entry_from_split == True and is_first_call_after_first_pass != True: partner_node =", "next_next_candidates = [x[1] for x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair def", "left_value1 = self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose", "chooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node == self.getRootEntry().getChild(): return node", "- y1 distance = math.sqrt(change_x ** 2 + change_y ** 2) return distance", "= 16 # n = 6,000 works in 56.672 sec. 
for pypy with", "k) * 100) # z1 = int(100 + random.randint(0, k) * 100) #", "partial_result def doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders): if entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(),", "\"white\") root_entry = self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self, entries, image, 0) \"\"\" image.strokeColor(\"orange\")", "if parent has zero entries after removing this entry, this should be okay", "m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result next_result =", "axis, M, m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis) return", "r-tree, takes O(n * log(n)) time; # these times involve n, which is", "node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node): node_list = [] self.getNodesHelper(node, node_list) return node_list", "using enclosure/containment # w.r.t. reference rectangle, add children to priority queue, # ignore", "x1 change_y = y2 - y1 distance = math.sqrt(change_x ** 2 + change_y", "for y in x[1]]), x) for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])),", "entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR()", "upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod def rstarChooseSplitAxis(entries, M, m): result = RTree.rstarGenDistributions(entries, M,", "self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim def getParent(self): return self.parent def getEntries(self):", "distance class RTreeNode: def __init__(self, parent, entries, is_leaf, entry = None, split_history_root_dimension =", "resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) 
self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self,", "= entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x) for x in mbr_list] enlargement_values =", "if no match is found # finds one match if such a node", "* x next_y = multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x +", "mbr is not contained within reference mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: #", "lower_right = point result_mbr = RawMBR(upper_left, lower_right, point) return result_mbr def getContainedItem(self): return", "# n = 14500 # 170.053 seconds (~398x slower for 145x growth; expected", "664x slower) # n = 14500 # 170.053 seconds (~398x slower for 145x", "or (self.getNumChildren() != 0 and False not in [x.getChild().getNumEntries() == 0 for x", "True) root_mbr = CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return", "partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if (parent.getNumChildren() + 2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry)", "import math def getDistance(point1, point2): x1, y1 = point1 x2, y2 = point2", "Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3,", "1 / (1.0 * 6.5) * 0.8 offset = (1536 * 0.2) /", "partial_result) def getNodesForNode(self, node): node_list = [] self.getNodesHelper(node, node_list) return node_list \"\"\" def", "axis, M, m) entry_group1, entry_group2 = result parent = curr_node.getParent() \"\"\" if parent", "side = max(0, min(comp_a2, comp_b2) - max(comp_a1, comp_b1)) sides.append(side) intersection_volume = reduce(lambda x,", "min_S_value = min([x[1] for x in d_S_pairs]) min_S_value_d_S_pair_candidates = [x for x in", 
"xtreeInsertHelper(self, entry, node): split_status = None next_mbr = None if True: # if", "x in entry_group1] mbr_group2 = [x.getMBR() for x in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1)", "= RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2,", "= PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self, entries, image, 0)", "return margin def toString(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() result = str(list(upper_left", "are distinct # return a list of entries def getRectangleCloseDescendants(self, reference_entry): # repeatedly", "split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e, ee = split_result resulting_entries_from_split = [e,", "toNumChildrenStringHelper(self, node): if node == None: return \"\" entries = node.getEntries() children =", "if we made it this far, we should add children to priority queue", "= root_entry pair = (priority,item) heapq.heappush(heap,pair) # print entry_pq # raise Exception() result_entry_list", "# if curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key =", "= [x.getMBR() for x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr),", "== self.getRootEntry().getChild(): return node else: return node.getParent() else: entries = node.getEntries() candidate_entries =", "partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild() for x in partner_entries]", "slower for 55x growth; expected 317x slower) # n = 10000 # 84.222", "False: continue # kick out close descendant candidates on occasion, # if containment", "entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == 
True: return True else: entries = entry.getChild().getEntries()", "= self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return chosen_entry def", "heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque = deque() # while len(heap)", "if curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda", "0), (50, 100, 0), point3) curr_mbr2b = RawMBR((50, 50, 0), (100, 100, 0),", "xrange(10): upper_left = (20, 20) lower_right = (40, 40) mbr = RawMBR(upper_left, lower_right,", "= curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component = 0 if curr_mbr_is_contained", "RawMBR(upper_left, lower_right, point) return result_mbr def getContainedItem(self): return self.contained_item def getMBRList(self): return [self]", "return True else: for curr_node in node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth +", "in 3.428 sec. 
for pypy with m = 8 and M = 16", "is a leaf node # search all entries of RN to find E.mbr", "PriorityQueue: def __init__(self): self.heap = [] def push(self, item, priority): pair = (priority,item)", "tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(), x[1]) for x in tagged_mbr_list] area_values = [x[0]", "for a well-formed r-tree, this takes O(n * log(n)) time, # where n", "== 0: pass return (RTree.NO_SPLIT, [node]) \"\"\" follow = self.chooseSubtree(entry, node).getChild() result =", "100, 0) point8 = (110, 100, 0) curr_mbr1 = RawMBR((100, 100, 0), (100,", "components = [x[i] for x in points] min_comp_value = min(components) max_comp_value = max(components)", "= entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry,", "entry2.setChild(node2) for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2: curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension)", "= mbr self.child = child def getMBR(self): return self.mbr def setMBR(self, mbr): self.mbr", "return result def getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension = self.getDimension() does_enclose", "mbr): partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doEnclosureQueryHelper(self, mbr, entry,", "doesEnclose(self, mbr): dimension = self.getDimension() does_enclose = True for i in xrange(dimension): left_value1", "\" \") + \")\" return overall_str def toString(self): root = self.getRootEntry().getChild() return self.toStringHelper(root)", "return False else: return True else: for curr_node in node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node,", "curr_x_tree.insert(next_entry) union_area = 
curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum / (1.0 * union_area) if multi_overlap_ratio", "which is number of actual rectangles # or leaves in r-tree; these times", "x[1]]) for x in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs]", "Q): # demote super-node if necessary if node.isSuperNode() == True and node.getNumChildren() <=", "partial_result def doEnclosureQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) ==", "# is not acyclic and we have cliques # note that we don't", "assume rectangles are unique for close-descendant # and close-ancestor finding; the assumption is", "RTreeEntry.draw(self, entries, image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3 * 0.8", "= [x[0] for x in tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values = [x for", "]) for j in xrange(len(window_left_sizes))] upper_sorted_entries = entries[ : ] upper_sorted_entries.sort(key = lambda", ": ] upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j]", "to fix margin calculation # note that we assume rectangles are unique for", "x[0][1].getArea(), x[1]) for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for x in combined_area_tagged_next_candidate_distributions]", "x1 return margin if self.getDimension() == 2: x1, y1 = upper_left x2, y2", "- x1) + 2 * (y2 - y1) return margin surface_area = 0", "return True else: return False else: entries = curr_entry.getChild().getEntries() for next_entry in entries:", "upper_comp_distributions = result[i] S_comp_value = 0 low_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]],", "not tested # returns 
entries # does intersection query def doOverlapQuery(self, mbr, without_borders", "= 100 # 0.427 seconds (~1x slower for 1x growth; expected 1x slower)", "union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() == True: \"\"\" if node.isUnderfull()", "getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension = self.getDimension() does_enclose = True for", "condenseTreeHelper(self, node, Q): # demote super-node if necessary if node.isSuperNode() == True and", "log(n)) time, # where n is number of actual rectangles or leaves; #", "perimeter_x, perimeter_y)) children = [x.getChild() for x in entries] entry.draw(tree, entries, image, depth", "6.5) * 0.8 offset = (1536 * 0.2) / 2 next_x = multiplier", "entry_group2 = result next_result = (entry_group1, entry_group2, axis) return next_result def xtreeOverlapMinimalSplit(self, node,", "updated on 2016-08-23 to fix traditional/non-traditional isLeafNode() distinction # updated on 2016-08-25 to", "# b \"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i]) # print comp_a1, comp_a2, comp_b1, comp_b2", "M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis,", "resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node == None: return (False, []) else: parent =", "next_y2 = (multiplier * x2 + offset, multiplier * y2 + offset) if", "curr_mbr2 = RawMBR((50, 100, 0), (50, 100, 0), point3) curr_mbr2b = RawMBR((50, 50,", "def getArea(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() sides = [] for i", "y in x[1]]) for x in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x", "[] # lower_rights = [(3, 10, 10), (1, 10, 10), (8, 10, 10),", "min_components.append(min_comp_value) 
max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr = CompositeMBR(upper_left_point, lower_right_point, component_mbr_list)", "n, which is number of actual rectangles # or leaves in r-tree; these", "candidate_tagged_enlargement_values = [x for x in tagged_enlargement_values if x[0] == min_enlargement_value] candidate_entries =", "works in 56.672 sec. for pypy with m = 8 and M =", "comp_a2 = max(upper_left_a[i], lower_right_a[i]) # b \"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i]) # b", "n = 10000 # k = int(round((20000 * math.log(20000, 2)) ** (1 /", "mbr_list] points = upper_left_points + lower_right_points min_components = [] max_components = [] for", "# overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list = [] if node.getNumChildren() == 0", "entry) entry_collection3, entry_collection4, dimension, do_fail = result2 # raise Exception() if do_fail ==", "def isEmpty(self): return len(self.heap) == 0 def peek(self): heap = self.heap pair =", "((803, 233, 521), (1314, 717, 1487)), \\ ((660, 268, 962), (1293, 619, 1521)),", "# and to add all-start-rectangles close-ancestor finding, # which for a well-formed r-tree,", "node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2, (110, 200, 100), point2) node2 = RTreeNode(None, [],", "= (1536 * 0.2) / 2 x1 = 0 y1 = 0 x2", "window_size_pairs = [(window_left_sizes[i], len(entries) - window_left_sizes[i]) for i in range(len(window_left_sizes))] window_size_pairs = [x", "re-structure and modify adjustTree(); # stop at root instead of non-existent parent of", "in 56.672 sec. 
for pypy with m = 8 and M = 16", "# to find the entry containing node; just look at parent of entry", "x[0]], [y.getMBR() for y in x[1]]) for x in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]),", "= S_comp_value d_S_pairs = S_comp_dict.items() min_S_value = min([x[1] for x in d_S_pairs]) min_S_value_d_S_pair_candidates", "min-pq # priority = (first_priority_component, second_priority_component) if curr_mbr.isRaw() == True: priority = -1", "self.lower_right def getIDValue(self): return self.id_value class Point: def __init__(self, vec, id_value): self.vec =", "n = 1000 # 1.1649 seconds (~2.72x slower for 10x growth; expected 33x", "is_non_traditional_leaf_node = (self.getParent() == None and self.getNumChildren() == 0) or (self.getNumChildren() != 0", "k for k in range(1, M - 2 * m + 2 +", "image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3 * 0.8 # offset = (768 * 0.2)", "of RN that cover E.mbr # follow the corresponding subtrees unti lthe leaf", "reference_mbr, result_entry_list, ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque = deque() # while len(heap) !=", "= mbr def getChild(self): return self.child def setChild(self, node): self.child = node @staticmethod", "node.getParent(), [entry], False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree,", "time at worst; # and to add all-start-rectangles close-ancestor finding, # which for", "in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild())", "entries: child = entry.getChild() child_str = self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str =", "entry): if entry.getMBR().isRaw() == True: if 
entry.getMBR().doesEnclose(mbr) == True: return True else: entries", "def getMarginValue(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() if self.getDimension() == 0: raise", "= (60, 60) lower_right = (80, 80) elif i % 4 == 3:", "node if node.isUnderfull() == False: # print \"not underfull\" parent = node.getParent() curr_entries", "for n = 10000 # k = int(round((20000 * math.log(20000, 2)) ** (1", "if have_node_str == True: curr_depth = \"-\" if node.getNumEntries() != 0 else str(depth)", "else: overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()] # overall_str_list =", "mbr = entry.getMBR() location = Point.toPoint(mbr) x, y = location multiplier = 1", "self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self, node, entry):", "ee = split_result return tree.adjustTree(tree, l, [e, ee], True, False) else: return (False,", "start_rectangle_nodes = [x for x in self.getNodes() if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries =", "= heap[0] result = pair return result def toList(self): pair_list = self.heap items", "random.random() * 100) x = random.randint(0, 10000) y = random.randint(0, 10000) # upper_left", "entry_collection3, entry_collection4, dimension, do_fail = result2 # raise Exception() if do_fail == True", "[x.getChild() for x in partner_entries] partner_mbr_list = [x.getMBR() for x in partner_entries] partner_tight_overall_mbr", "__init__(self, upper_left, lower_right): self.upper_left = upper_left self.lower_right = lower_right def isRaw(self): return False", "self.getRootEntry().getChild().getNumChildren() == 0: root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None)", ">= comp_b1 if do_overlap == False: break return do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b):", "* x1 + offset, multiplier * y1 + offset) next_x2, next_y2 = 
(multiplier", "resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node ==", "do occasionally # note that M of two works import sys # import", "self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return chosen_entry def xtreeInsert(self,", "curr_mbr_area # min-pq # priority = (first_priority_component, second_priority_component) if curr_mbr.isRaw() == True: priority", "tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x) for x in mbr_list] enlargement_values = [x[0] for", "for i in xrange(1000): upper_left = (0, 0) lower_right = (10, 10) mbr", "= \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toString(self): root", "tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff = time2 - time1 print \"time difference:\", time_diff,", "by reference rectangle; # check explicitly for this case if reference_mbr.doesEnclose(mbr) == False:", "entries = root.getEntries() chosen_entry = entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if", "print len(tree2.getNodes()) import time time1 = time.time() result = tree2.getAllRectangleCloseAncestors() time2 = time.time()", "= self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry", "next_x2, next_y2 = (multiplier * x2 + offset, multiplier * y2 + offset)", "next_x + offset center_y = next_y + offset radius = 2 perimeter_x =", "parent = curr_node.getParent() \"\"\" if parent != None and (node in parent.getChildren()): pass", "\"split\" return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\" 
elif node.getNumChildren()", "entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem here tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2)", "xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b = lower_right[i] term1 = comp_1b - comp_1a for", "comp2 = lower_right[i] side = comp2 - comp1 sides.append(side) area = reduce(lambda x,", "contains E is found # remove E from L # call algorithm condenseTree(L)", "# if the root has only one child (and it is not a", "= node.getEntries() candidate_entries = None # if node.isLeafNode() == True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries,", "[x.getChild().getNumEntries() == 0 for x in self.getEntries()]) return is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node", "def toNumChildrenStringHelper(self, node): if node == None: return \"\" entries = node.getEntries() children", "offset = (1536 * 0.2) / 2 x1 = 0 y1 = 0", "getNumChildren(self): return self.getNumEntries() def setParent(self, node): self.parent = node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node =", "lower_rights[i] mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry =", "def delete(self, E, RN): def findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry,", "def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry =", "self.M def isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries() < self.getMinimumNumEntriesPerNode()", "multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x + offset center_y = next_y", "is_leaf self.m = 8 self.M = 16 self.child_to_entry_dict = {} for curr_entry in", "= tuple(max_components) result_mbr = CompositeMBR(upper_left_point, 
lower_right_point, component_mbr_list) return result_mbr class HyperRectangle: def __init__(self,", "i in xrange(10): upper_left = (20, 20) lower_right = (40, 40) mbr =", "image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if len(entries) == 0: parent =", "RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result parent = curr_node.getParent() \"\"\" if", "# for n = 100 # k = int(round((1000 * math.log(1000, 2)) **", "point result_mbr = RawMBR(upper_left, lower_right, point) return result_mbr def getContainedItem(self): return self.contained_item def", "(-1 if curr_mbr_is_contained == True else 1) * curr_mbr_area # min-pq # priority", "return result_list @staticmethod def rstarChooseSplitAxis(entries, M, m): result = RTree.rstarGenDistributions(entries, M, m) S_comp_dict", "in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q) return # not tested #", "added_nodes = result curr_entry = node.getEntry() curr_mbr = curr_entry.getMBR() mbr = entry.getMBR() next_mbr", "leaf_node = child_node.getParent() if entry != self.getRootEntry() else None if leaf_node == None:", "with parent pointers for xtreeInsert(); # have supernode demotion when size decreases to", ">= m and x[1] <= M and x[1] >= m] for i in", "CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None if have_resulting_second_entry_from_split == True: first_entry, second_entry = resulting_entries_from_split", "in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def draw(self): # im = Image.new(\"RGB\", (512, 512),", "comp2 - comp1 sides.append(side) area = reduce(lambda x, y: x * y, sides)", "MBR.doOverlap(curr_mbr, x[0]) == True] for 
tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr curr_node", "demote super-node if necessary if node.isSuperNode() == True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False)", "print \"not underfull\" parent = node.getParent() curr_entries = node.getEntries() entry = parent.retrieveEntryForChild(node) children", "= mbr_b.getLowerRight() do_overlap = True # assume that rectangles never have negative area", "curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def removeEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child)", "x1 = upper_left[0] x2 = lower_right[0] margin = x2 - x1 return margin", "(parent.getNumChildren() + 2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry],", "if have_node_str == True: curr_leaf_status = str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else: overall_str_list =", "def __init__(self, upper_left, lower_right): self.upper_left = upper_left self.lower_right = lower_right def isRaw(self): return", "= [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self, entries, entry): mbr", "True: first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True and", "mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry = RTreeEntry(mbr,", "location = Point.toPoint(mbr) x, y = location multiplier = 1 / (1.0 *", "= 0 else: overlap_ratio = overlap_area / (1.0 * union_area) # raise Exception()", "= 4 # n = 1,000 works in 2.996 sec. 
for pypy with", "= RTreeNode(None, [], True) entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem here tree.insert(entry8)", "# ignore node if associated mbr does not enclose reference mbr # and", "partner_entries] partner_mbr_list = [x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if", "node if associated mbr does not enclose reference mbr # and associated mbr", "else: split_result = tree.rstarSplitNode(parent, partner_entry) l, ll, e, ee = split_result return RTree.rstarAdjustTreeHelper(tree,", "False: curr_mbr = entry.getMBR() entries = self.getEntries() tagged_mbr_list = [(x.getMBR(), x) for x", "without_borders = False): partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders) return partial_result def", "id_value def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getIDValue(self): return self.id_value", "mbr = RawMBR(upper_left, lower_right, contained_item) return mbr def doesMatch(self, mbr): upper_left_matches = self.getUpperLeft()", "233, 521), (1314, 717, 1487)), \\ ((660, 268, 962), (1293, 619, 1521)), \\", "# raise Exception() # for entry in entries[0 : 15]: for entry in", "319, 789), (1877, 744, 791)), \\ ((1081, 1056, 1020), (1708, 1075, 1542)), \\", "ancestor:\", close_ancestor_entry.getMBR().toString() # raise Exception() # for entry in entries[0 : 15]: for", "(True, entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self, node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M =", "max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr =", "def doesMatch(self, mbr): upper_left_matches = self.getUpperLeft() == mbr.getUpperLeft() 
lower_right_matches = self.getLowerRight() == mbr.getLowerRight()", "isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries() < self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self,", "* 6.5) * 0.8 offset = (1536 * 0.2) / 2 next_x =", "# follow the corresponding subtrees unti lthe leaf L that contains E is", "= node.getEntries() mbr_list = [entry.getMBR()] for mbr in mbr_list: upper_left = mbr.getUpperLeft() lower_right", "maximal disjointedness # is not going to be good enough to cut down", "component_does_enclose = left_value1 <= left_value2 and right_value1 >= right_value2 if component_does_enclose == False:", "int(round((5500 * math.log(5500, 2)) ** (1 / 3.0) / denominator)) # for n", "this far, we should add children to priority queue entries = node.getEntries() priority_tagged_internal_entries", "r-tree and O(n * log(n)) time at worst; # and to add all-start-rectangles", "x in candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self, entries, entry): mbr = entry.getMBR() tagged_mbr_list", "multiplier + offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) image.write(\"tree.png\") def main(): point1", "node.addEntry(entry) entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() == 0: pass return (RTree.NO_SPLIT, [node]) \"\"\" follow", "entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry) l, ll, e, ee = split_result return tree.adjustTree(tree,", "node else: return node.getParent() else: entries = node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if", "Q.append(keep_node) # only makes sense to speak of modifying mbr if we plan", "don't need a O(log(n)) time operation # to find the entry containing node;", "\"\"\" entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] 
mbr_list =", "RTreeNode(None, [], True) entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3, (110,", "0 low_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]])", "# check explicitly for this case if reference_mbr.doesEnclose(mbr) == False: continue # kick", "next_root = RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO =", "add children to priority queue entries = node.getEntries() priority_tagged_internal_entries = [] for curr_entry", "O(log(n)) time on average # for start rectangle taken from set of actual", "* 0.2) / 2 offset = (1536 * 0.2) / 2 x1 =", "in xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod def rstarChooseSplitAxis(entries, M,", "\")\" return overall_str def toDepthString(self): root = self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self,", "print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100, 0), (110, 200, 100), None) print tree.doContainmentQuery(curr_mbr3)", "if associated mbr does not enclose reference mbr # and associated mbr is", "== None: return (None, None, None, True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M =", "x[1]]), x) for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x", "else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry) l, ll, e, ee = split_result", "or below M # updated on 2016-11-06 to add single-start-rectangle-based # close-descendant finding", "lower_right, contained_item) return mbr def doesMatch(self, mbr): upper_left_matches = 
self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches", "entry_collection1, entry_collection2, dimension = split_result if was_successful == True: mbr_collection1 = [x.getMBR() for", "entry_collection2, dimension) def xtreeTopologicalSplit(self, node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall", "/ 2 next_x1, next_y1 = (multiplier * x1 + offset, multiplier * y1", "node2.setParent(next_root) pass return (node1, node2, entry1, entry2) @staticmethod def rstarPreadjustTree(self, leaf_node): node =", "split_history_root_dimension self.is_supernode = is_supernode self.entry = entry def getEntry(self): return self.entry def setEntry(self,", "tree2.insert(entry) \"\"\" if entry.getChild().getParent() == None: raise Exception() \"\"\" # print tree.toString() #", "x) for x in mbr_list] enlargement_values = [x[0] for x in tagged_enlargement_values] min_enlargement_value", "return self.contained_item def getMBRList(self): return [self] def clone(self): upper_left = self.getUpperLeft() lower_right =", "= self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall,", "mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if", "x in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values)", "2016-08-23 to fix traditional/non-traditional isLeafNode() distinction # updated on 2016-08-25 to fix overlap", "and node == self.getRootEntry().getChild(): # if node.getNumChildren() == 0: # if node.isNonTraditionalLeafNode() ==", "[], True) entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6, 
(110, 200,", "= {} for i in range(len(entries)): curr_entry = entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr]", "else: parent = node.getParent() curr_entries = node.getEntries() entry = None \"\"\" if node.getParent()", "= RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2),", "node_list \"\"\" def getUnionArea(self): pass \"\"\" # takes O(log(n)) time on average for", "min(components) max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr", "next_y1 = y1 * multiplier + offset next_x2 = x2 * multiplier +", "2016-11-16 to fix margin calculation # note that we assume rectangles are unique", "occasion, # if containment query for conflict x-tree returns entries matching_entries = conflict_x_tree.doContainmentQuery(mbr)", "shorten tree entries = root.getEntries() chosen_entry = entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\"", "estimates; the reason is that # otherwise the directed graph implied by the", "tree.toString() result = tree.getRectangleCloseDescendants(entry8) print result result = tree.getAllRectangleCloseAncestors() print result print len(result)", "[node.getEntry().getMBR().toString(), str(node)] for entry in entries: child = entry.getChild() child_str = self.toStringHelper(child) curr_str", "\"\"\" denominator = (100 * math.log(100, 2)) ** (1 / 3.0) k =", "perimeter_y = next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4)", "(self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self): return self.M def 
isFull(self): return self.getNumEntries()", "@staticmethod def draw(tree, entries, image, depth): for entry in entries: RTreeEntry.drawHelper(tree, entry, image,", "len(entries) > (M + 1): raise Exception() window_left_sizes = [m - 1 +", "lower_right) + [self.isRaw()]) return result def getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension", "in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in", "or len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False, None, None, dimension) else: return (True, entry_collection3,", "mbr2) area1 = mbr1.getArea() area2 = mbr2.getArea() union_area = area1 + area2 -", "return False else: entries = curr_entry.getChild().getEntries() for next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR())", "math.log(100, 2)) ** (1 / 3.0) k = 1 # k = int(round(denominator", "overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString(), str(node)] for entry in", "= tuple(min_components) lower_right_point = tuple(max_components) result_mbr = CompositeMBR(upper_left_point, lower_right_point, component_mbr_list) return result_mbr class", "= self.getLowerRight() == mbr.getLowerRight() result = upper_left_matches == True and lower_right_matches == True", "<= M and x[0] >= m and x[1] <= M and x[1] >=", "node == self.getRootEntry().getChild(): return node else: return node.getParent() else: entries = node.getEntries() candidate_entries", "actual rectangles # or leaves in r-tree; these times assume \"maximal disjointedness\" #", "else: # RN is an internal node # find all entries of RN", "= point lower_right = point result_mbr = RawMBR(upper_left, lower_right, point) return result_mbr def", "self.getDimension() does_enclose = True for i in xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2 =", "we do do 
occasionally # note that M of two works import sys", "# heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x: x[0], reverse = True) for priority_tagged_internal_entry in", "4]: # for entry in entries[0 : 15]: for entry in entries: tree2.insert(entry)", "True: # ignore node if enclosing mbr exists in conflict x-tree continue if", "pass return (RTree.NO_SPLIT, [node]) \"\"\" follow = self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry, follow)", "for this case if reference_mbr.doesEnclose(mbr) == False: continue # kick out close descendant", "return self.parent def getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self):", "in entries: RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod def drawHelper(tree, entry, image, depth): node", "= RawMBR(point6, (110, 200, 100), point6) node6 = RTreeNode(None, [], True) entry6 =", "n = 6,000 works in 56.672 sec. 
for pypy with m = 8", "does not enclose reference mbr # and associated mbr is not contained within", "random.randint(0, k) * 100) # x2 = int(x1 + random.random() * 100) #", "result next_result = (entry_group1, entry_group2, axis, False) return next_result def xtreeSupernodeInsert(self, node, entries):", "= mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1, y1 = upper_left x2, y2 = lower_right", "entries, image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3 * 0.8 #", "* log(n)) time, # where n is number of actual rectangles or leaves;", "upper_left = (20, 20) lower_right = (40, 40) mbr = RawMBR(upper_left, lower_right, None)", "curr_entry, partial_result, without_borders) # returns entries def doEnclosureQuery(self, mbr): partial_result = [] self.doEnclosureQueryHelper(mbr,", "for entry in entries: child = entry.getChild() child_str = self.toDepthStringHelper(child, depth + 1)", "1.1649 seconds (~2.72x slower for 10x growth; expected 33x slower) # n =", "# import PythonMagick import heapq from collections import deque # min-pq class PriorityQueue:", "time; # these times involve n, which is number of actual rectangles #", "* (y2 - y1) return margin surface_area = 0 for i in xrange(self.getDimension()):", "max(0, min(comp_a2, comp_b2) - max(comp_a1, comp_b1)) sides.append(side) intersection_volume = reduce(lambda x, y: x", "rstarChooseSplitAxis(entries, M, m): result = RTree.rstarGenDistributions(entries, M, m) S_comp_dict = {} for i", "as internal nodes can temporarily look like leaf nodes # keep_nodes = [x", "entry_collection2] # this line presumes that we have parent set correctly for a", "100, 0) point5 = (70, 100, 0) point6 = (80, 100, 0) point7", "depth): if node == None: return \"\" entries = node.getEntries() children = node.getChildren()", "rectangles # or leaves in r-tree; these times assume \"maximal disjointedness\" # and", "= RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) 
\"\"\" for i in xrange(10): upper_left = (20,", "min_components = [] max_components = [] for i in xrange(base_mbr.getDimension()): components = [x[i]", "heapq.heappush(self.heap,pair) def pop(self): (priority,item) = heapq.heappop(self.heap) return item def isEmpty(self): return len(self.heap) ==", "import PythonMagick import heapq from collections import deque # min-pq class PriorityQueue: def", "True) entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 = RawMBR(point8, (110, 200, 100),", "curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\"", "root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component = 0", "E.mbr # else: # RN is an internal node # find all entries", "curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(), x[1]) for x", "second_priority_component) priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item = curr_entry # pair =", "mbr if we plan on keeping the node if node.isUnderfull() == False: #", "512), \"white\") \"\"\" im = Image.new(\"RGB\", (768, 768), \"white\") draw = ImageDraw.Draw(im) root", "[] for i in xrange(base_mbr.getDimension()): components = [x[i] for x in points] min_comp_value", "2: upper_left = (60, 60) lower_right = (80, 80) elif i % 4", "for x in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions]", "= node.getChildren() have_node_str = True overall_str_list = None if have_node_str == True: curr_depth", "curr_depth = \"-\" if node.getNumEntries() != 0 else str(depth) overall_str_list = [curr_depth] 
else:", "xrange(4): \"\"\" ul_lr_pairs = [((797, 989, 602), (910, 1248, 1035)), \\ ((920, 974,", "without_borders) return partial_result def doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders): if entry.getMBR().isRaw() == True:", "root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component = 0 if", "True: # print \"supernode encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception()", "x[1] >= m] for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ : ] low_sorted_entries.sort(key", "entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1 =", "= node.getEntries() children = node.getChildren() have_node_str = True overall_str_list = None if have_node_str", "False def isComposite(self): return False def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right", "# min-pq priority = (first_priority_component, second_priority_component) # priority = -1 * root_mbr_area #", "\\ ((660, 268, 962), (1293, 619, 1521)), \\ ((798, 928, 1028), (1762, 1795,", "> node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry) was_successful, entry_collection1, entry_collection2, dimension = split_result if", "only one child (and it is not a leaf) # remove the root", "= self.heap pair = heap[0] result = pair return result def toList(self): pair_list", "10), (6, 10, 10), (9, 10, 10), (3, 10, 10), (1, 10, 10),", "to or below M # updated on 2016-11-06 to add single-start-rectangle-based # close-descendant", "of low-level to high-level; # wish to insert using order of high-level to", "mbr) node.getEntry().setMBR(next_mbr) # this parent-setting step is crucial # if node.isNonTraditionalLeafNode() == False:", 
"return partial_result def doContainmentQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: # print", "upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2 = lower_right_b[i] side = max(0,", "def isRaw(self): return True @staticmethod def makeMBRFromPoint(point): upper_left = point lower_right = point", "min([x[1] for x in d_S_pairs]) min_S_value_d_S_pair_candidates = [x for x in d_S_pairs if", "(priority,item) # if curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key", "= lambda x: x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for", "entry): mbr = entry.getMBR() tagged_mbr_list = [] for curr_entry in entries: base_mbr =", "** (1 / 3.0) k = 1 # k = int(round(denominator / denominator))", "1036), (785, 378, 1963)), \\ ((803, 1054, 307), (1776, 1597, 501)), \\ ((803,", "10, 10), (9, 10, 10), (6, 10, 10), (9, 10, 10), (3, 10,", "added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry) was_successful, entry_collection1, entry_collection2, dimension", "<= M and x[1] >= m] for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[", "for x in added_nodes]) # print \"supernode #1\" return (RTree.SUPERNODE, [node]) elif split_status", "= child def getMBR(self): return self.mbr def setMBR(self, mbr): self.mbr = mbr def", "# and depth-first stack for internal nodes and # best-first priority queue for", "= RTreeNode(None, [], True) entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6,", "node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode() for this, as internal nodes can temporarily", "= node.getEntries() entry = 
node.getParent().retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list", "margin calculation # note that we assume rectangles are unique for close-descendant #", "1795, 1309)), \\ ((225, 359, 290), (579, 950, 700)), \\ ((297, 196, 750),", "for y in x[1]]), x) for x in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])),", "if x[0] <= M and x[0] >= m and x[1] <= M and", "= (10, 10) elif i % 4 == 1: upper_left = (20, 20)", "for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() # raise Exception() # for", "= result[i] S_comp_value = 0 low_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR()", "PriorityQueue() heap = [] # entry_pq.push(root_entry, priority) item = root_entry pair = (priority,item)", "= Image.new(\"RGB\", (768, 768), \"white\") draw = ImageDraw.Draw(im) root = self.getRoot() root.draw(self, draw,", "from set of actual rectangles # for an r-tree and O(n * log(n))", "conflict x-tree continue if entry == ignore_entry: # ignore node if its entry", "self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doContainmentQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() ==", "was_successful, entry_collection1, entry_collection2, dimension = split_result if was_successful == True: mbr_collection1 = [x.getMBR()", "getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def getNodesHelper(self, node, partial_result): partial_result.append(node)", "offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) image.write(\"tree.png\") def main(): point1 = (30,", "(9, 10, 10), (6, 10, 10), (9, 10, 10), (3, 10, 10), (1,", "print result result = tree.getAllRectangleCloseAncestors() print result print len(result) for 
entry_to_close_ancestor_entry_list_pair in result.items():", "curr_entry.getMBR().toString() # print \"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node, Q): # demote super-node", "mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal = upper_left1 == upper_left2 and lower_right1 == lower_right2", "= lower_right[i] term1 = comp_1b - comp_1a for j in xrange(i + 1,", "= (x, y) lower_right = (x, y) # upper_left = ul_lr_pairs[i][0] # lower_right", "corresponding subtrees unti lthe leaf L that contains E is found # remove", "return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self):", "return (True, entry_collection3, entry_collection4, dimension) else: return (True, entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self,", "comp_b1, comp_b2 # do_overlap = True if without_borders == True: do_overlap = do_overlap", "[x.getMBR() in node.getEntries()] curr_x_tree = RTree() overlap_area_sum = sum([x.getArea() for x in mbr_list])", "root # set as new root its only child pass def condenseTree(self, leaf_node):", "item = curr_entry # internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component) priority_tagged_internal_entry = (priority, curr_entry)", "candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry):", "curr_entry.getMBR().isRaw() == True: if entry == curr_entry: return True else: return False else:", "== True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(),", "[x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) 
self.condenseTreeHelper(node.getParent(), Q) return #", "prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0: return [] reference_mbr = reference_entry.getMBR() root_entry = self.getRootEntry()", "# k = int(round((5500 * math.log(5500, 2)) ** (1 / 3.0) / denominator))", "r-tree; these times assume \"maximal disjointedness\" # and depth-first stack for internal nodes", "mbr): partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doContainmentQueryHelper(self, mbr, entry,", "else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = node.getSplitHistoryRootDimension()", "entries = node.getEntries() children = node.getChildren() have_node_str = True overall_str_list = None if", "\"PNG\") \"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry =", "in entries: base_mbr = curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values =", "makes sense to speak of modifying mbr if we plan on keeping the", "/ 3.0) / denominator)) # for n = 10000 # k = int(round((20000", "xtreeTopologicalSplit(self, node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis", "in entries: curr_node = entry.getChild() node.addEntry(entry) # needed this curr_node.setParent(node) \"\"\" entries =", "upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs = S_comp_dict.items()", "multiplier + offset next_x2 = x2 * multiplier + offset next_y2 = y2", "= None \"\"\" if node.getParent() == 
None: entry = tree.getRootEntry() else: entry =", "enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change = enlarged_mbr_area - base_mbr_area return", "= mbr2.getArea() union_area = area1 + area2 - overlap_area ovelap_ratio = None if", "(self.getNumChildren() != 0 and False not in [x.getChild().getNumEntries() == 0 for x in", "# pair = (priority,item) # if curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr) == True:", "margin def toString(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() result = str(list(upper_left +", "= None \"\"\" if i % 4 == 0: upper_left = (0, 0)", "entries.append(entry) for i in xrange(1000): upper_left = (0, 0) lower_right = (10, 10)", "self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e, ee = split_result", "== None: return (False, []) else: parent = node.getParent() curr_entries = node.getEntries() entry", "has an actual rectangle # decide whether to include associated entry in result;", "and O(n * log(n)) time at worst; # and to add all-start-rectangles close-ancestor", "toLeafStatusStringHelper(self, node): if node == None: return \"\" entries = node.getEntries() children =", "distance = math.sqrt(change_x ** 2 + change_y ** 2) return distance class RTreeNode:", "print \"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node, Q): # demote super-node if necessary", "partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False: if have_resulting_second_entry_from_split == True:", "True: # print mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else: entries =", "tree # has entry-aware nodes; made bug fix for adjustTree(); # 
fixed bug", "n = 10,000 works in 1 min. 54 sec. for pypy with m", "RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() == True: \"\"\" if node.isUnderfull() == True: # print", "[] max_components = [] for i in xrange(base_mbr.getDimension()): components = [x[i] for x", "in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix order", "point2) node2 = RTreeNode(None, [], True) entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3", "radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children = [x.getChild() for x in entries] entry.draw(tree,", "* log(n)) time; # these times involve n, which is number of actual", "expected 317x slower) # n = 10000 # 84.222 seconds (~197x slower for", "self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result", "to fix traditional/non-traditional isLeafNode() distinction # updated on 2016-08-25 to fix overlap logic", "RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False: # if we made it", "after removing this entry, this should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root =", "return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod", "dimension = self.getDimension() does_enclose = True for i in xrange(dimension): left_value1 = self.getUpperLeft()[i]", "in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node, entry): curr_node = node", "reason is that # otherwise the directed graph implied by the 
r-tree #", "takes O(n * log(n)) time; # these times involve n, which is number", "and depth-first stack for internal nodes and # best-first priority queue for leaf", "partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False: if have_resulting_second_entry_from_split == True: if", "def getEntry(self): return self.entry def setEntry(self, entry): self.entry = entry def isSuperNode(self): return", "in points] min_comp_value = min(components) max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components)", "= result parent = curr_node.getParent() \"\"\" if parent != None and (node in", "curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for x in entry_group1] mbr_group2 = [x.getMBR() for x", "just look at parent of entry child if curr_entry.getMBR().isRaw() == True: if entry", "partner_mbr_list = [x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split", "is not contained within reference mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore", "entry = RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry) \"\"\" # for entry in entries[0", "= node.isLeafNode() prev_leaf_status = None curr_node = node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M =", "(110, 200, 100), point1) node1 = RTreeNode(None, [], True) entry1 = RTreeEntry(mbr1, node1)", "in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for x in overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions", "parent of root; # also, we implement delete(); note that our tree #", "= RawMBR((50, 100, 0), (110, 200, 100), None) print 
tree.doContainmentQuery(curr_mbr3) # raise Exception()", "= node.getEntries() entry = None if node.getParent() == None: entry = tree.getRootEntry() else:", "low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for y in", "in mbr_list]) for curr_mbr in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node =", "self.condenseTreeHelper(leaf_node, Q) # Q is in order of low-level to high-level; # wish", "== 2: color = PythonMagick.Color(0, 65535, 0, 32767) if upper_left == lower_right: image.strokeColor(\"none\")", "enlarged_mbr_area - base_mbr_area return area_change @staticmethod def doOverlap(mbr_a, mbr_b, without_borders = False): upper_left_a", "raise Exception() # print \"decision point\" \"\"\" if node.isSuperNode() == True: # print", "should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root = self.getRootEntry().getChild() \"\"\" if root.getNumChildren() ==", "super-node if node.isSuperNode() == True: # raise Exception() return (False, None, None, None)", "Exception() node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in node.getEntries()] curr_x_tree =", "x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x) for x", "= chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry): return self.xtreeInsert(entry) def chooseSubtree(self, entry,", "789), (1877, 744, 791)), \\ ((1081, 1056, 1020), (1708, 1075, 1542)), \\ ((358,", "have_node_str = True overall_str_list = None if have_node_str == True: curr_leaf_status = str(node.getNumChildren())", "# raise Exception() print tree.toString() # tree.delete(entry1) print tree.toString() # tree.delete(entry8) # tree.insert(entry1)", "55x growth; expected 317x slower) # n = 10000 # 84.222 seconds (~197x", "True: 
candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry)", "entry.getChild().getParent() == None: raise Exception() \"\"\" # print tree.toString() # for entry in", "for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x in mbr_pair_tagged_candidate_distributions]", "self.contained_item = contained_item def isRaw(self): return True @staticmethod def makeMBRFromPoint(point): upper_left = point", "def xtreeTopologicalSplit(self, node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries()", "None, None, True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries()", "root node never has a raw mbr # leaf is a non-traditional leaf", "i in range(len(entries)): curr_entry = entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list", "None: entry = tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x", "self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] for keep_node in keep_nodes: Q.append(keep_node) # only makes", "x in partner_entries] partner_mbr_list = [x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list)", "image.fillColor(color) center_x = next_x1 center_y = next_y1 radius = 4 perimeter_x = next_x1", "self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry) return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest, k):", "== True else 1 second_priority_component = (-1 
if root_mbr_is_contained == True else 1)", "sys # import PythonMagick import heapq from collections import deque # min-pq class", "\")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in", "RTree() print tree.toString() curr_root = tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110, 200, 100), point1)", "1597, 501)), \\ ((803, 233, 521), (1314, 717, 1487)), \\ ((660, 268, 962),", "def push(self, item, priority): pair = (priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item) = heapq.heappop(self.heap)", "= entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result)", "ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self, node, entry): # we", "elif color_choice == 2: color = PythonMagick.Color(0, 65535, 0, 32767) if upper_left ==", "CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False: if have_resulting_second_entry_from_split == True: if (parent.getNumChildren() +", "n = 2000 # n = 1000 # n = 20000 n =", "sides) return area @staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr, mbr] upper_left_points =", "[], True) entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem here tree.insert(entry8) print tree.toString()", "x in entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2)", "(1 / 3.0) k = 1 # k = int(round(denominator / denominator)) #", "= entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\",", "= self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose 
= left_value1 <= left_value2 and right_value1 >=", "cut down branches explored; # to counter saturation, domain has to grow with", "= upper_left x2, y2 = lower_right margin = 2 * (x2 - x1)", "parent = node.getParent() curr_entries = node.getEntries() entry = None if node.getParent() == None:", "if parent != None: curr_entries = node.getEntries() entry = node.getParent().retrieveEntryForChild(node) children = [x.getChild()", "if overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4,", "entry_collection4, dimension, do_fail = result2 # raise Exception() if do_fail == True or", "Exception() return (False, None, None, None) dimension = None result1 = self.xtreeTopologicalSplit(node, entry)", "RawMBR(point6, (110, 200, 100), point6) node6 = RTreeNode(None, [], True) entry6 = RTreeEntry(mbr6,", "chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry): return self.xtreeInsert(entry) def chooseSubtree(self, entry, node):", "= RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry) \"\"\" #", "average for start rectangle # taken from set of actual rectangles for an", "doOverlapQuery(self, mbr, without_borders = False): partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders) return", "return self.vec def getComponent(self, d): return self.getVec()[d] def getIDValue(self): return self.id_value import string", "[x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node,", "next_x1, next_y1 = (multiplier * x1 + offset, multiplier * y1 + offset)", "= pair return result def toList(self): pair_list = self.heap items = [x[1] for", "node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry) was_successful, 
entry_collection1, entry_collection2, dimension = split_result if was_successful", "have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() == None: entry =", "setMBR(self, mbr): self.mbr = mbr def getChild(self): return self.child def setChild(self, node): self.child", "= (multiplier * x1 + offset, multiplier * y1 + offset) next_x2, next_y2", "next_entry) if result == True: return result return False def delete(self, entry): #", "E_overall, entry): # prev_leaf_status = node.isLeafNode() prev_leaf_status = None curr_node = node m", "x[1] <= M and x[1] >= m] for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries =", "min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries,", "[self] def clone(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() contained_item = self.getContainedItem() mbr", "# this parent-setting step is crucial # if node.isNonTraditionalLeafNode() == False: # this", "should add children to priority queue entries = node.getEntries() priority_tagged_internal_entries = [] for", "= second_entry if have_resulting_second_entry_from_split == True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children", "14500 # x1 = int(100 + random.randint(0, k) * 100) # y1 =", "= reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component = 0 if curr_mbr_is_contained == True else", "overall_str_list = [curr_depth] else: overall_str_list = [] for entry in entries: child =", "[], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) for i in xrange(1000): upper_left", "entries[ : ] upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i]) 
upper_distributions = [(upper_sorted_entries[ : window_left_sizes[j]],", "x in tagged_area_values] min_area = min(area_values) candidate_tagged_area_values = [x for x in tagged_area_values", "entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q) return # not tested # returns entries # does intersection", "(x, y, z) # lower_right = lower_rights[i] mbr = RawMBR(upper_left, lower_right, None) node", "isRaw(self): return True @staticmethod def makeMBRFromPoint(point): upper_left = point lower_right = point result_mbr", "100 # 0.427 seconds (~1x slower for 1x growth; expected 1x slower) #", "root_entry): self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node = root depth", "chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node): if node.isLeafNode() == True:", "in xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2 =", "(50, 100, 0) point4 = (60, 100, 0) point5 = (70, 100, 0)", "can be None if no match is found # finds one match if", "root_mbr_area # min-pq priority = (first_priority_component, second_priority_component) # priority = -1 * root_mbr_area", "entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry = entries[i] curr_mbr", "= RawMBR(point3, (110, 200, 100), point3) node3 = RTreeNode(None, [], True) entry3 =", "# raise Exception() print tree.doOverlapQuery(curr_mbr2) # raise Exception() print tree.toString() # tree.delete(entry1) print", "self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries()) class RTreeEntry: def", "node.getEntry().setMBR(next_mbr) # this parent-setting step is crucial # if node.isNonTraditionalLeafNode() == False: #", "(low_distributions, 
upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod def rstarChooseSplitAxis(entries, M, m): result = RTree.rstarGenDistributions(entries,", "def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: return", "== True: # raise Exception() return (False, None, None, None) dimension = None", "priority = (first_priority_component, second_priority_component) # priority = -1 * root_mbr_area # entry_pq =", "upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1 center_y = next_y1 radius =", "= location multiplier = 1 / (1.0 * 6.5) * 0.8 offset =", "tree.getNodes() if x.isSuperNode() == True], tree.getRootEntry().getChild() # tree2.draw() print len(tree2.getNodes()) import time time1", "True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x: x[0], reverse = True) for priority_tagged_internal_entry", "= self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M, m)", "partial_result): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else: entries =", "= reference_entry.getMBR() root_entry = self.getRootEntry() root_node = root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual =", "xrange(len(window_left_sizes))] upper_sorted_entries = entries[ : ] upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i]) upper_distributions =", "# if we made it this far, we should add to conflict x-tree", "print tree.doOverlapQuery(curr_mbr2) # raise Exception() print tree.toString() # tree.delete(entry1) print tree.toString() # tree.delete(entry8)", "and self.getNumChildren() == 0) or (self.getNumChildren() != 0 and False not in [x.getChild().getNumEntries()", "RN): def 
findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry, curr_entry): \"\"\" if", "1, self.getDimension()): comp_2a = upper_left[j] comp_2b = lower_right[j] term2 = comp_2b - comp_2a", "= True for i in xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1", "(1536 * 0.2) / 2 next_x = multiplier * x next_y = multiplier", "* y, sides) return intersection_volume def getMarginValue(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight()", "node.getEntries() mbr_list = [x.getMBR() for x in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def", "len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry)", "1963)), \\ ((803, 1054, 307), (1776, 1597, 501)), \\ ((803, 233, 521), (1314,", "tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100, 0), (110, 200, 100), None) print", "curr_entry)) tagged_area_values = [(x[0].getArea(), x[1]) for x in tagged_mbr_list] area_values = [x[0] for", "entries, is_leaf, entry = None, split_history_root_dimension = None, is_supernode = False): self.parent =", "x-tree:\", conflict_x_tree.toString() # for a well-formed r-tree, this takes O(n * log(n)) time,", "__init__(self, vec, id_value): self.vec = vec self.id_value = id_value @staticmethod def toPoint(mbr): if", "(determined using points sampled) and assumed to be consistent # we never split", "False else: entries = curr_entry.getChild().getEntries() for next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) ==", "= [base_mbr, mbr] upper_left_points = [x.getUpperLeft() for x in mbr_list] lower_right_points = [x.getLowerRight()", "return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest, k): def 
getRectangleCloseDescendantsHelper(self, heap, reference_mbr,", "disjointedness # is not going to be good enough to cut down branches", "if node.isSuperNode() == True: # raise Exception() return (False, None, None, None) dimension", "x-tree returns entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry in matching_entries: # raise Exception()", "10), (9, 10, 10), (3, 10, 10), (1, 10, 10), (3, 10, 10)]", "that we have parent set correctly for a leaf, # which is not", "raise Exception() \"\"\" # print tree.toString() # for entry in entries[0 : 4]:", "if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()] # overall_str_list = [] if node.getNumChildren() ==", "close-descendant finding that takes O(log(n)) time on average # for start rectangle taken", "(first_priority_component, second_priority_component) # priority = -1 * root_mbr_area # entry_pq = PriorityQueue() heap", "10 # z = 10 # lower_right = (x, y, z) # lower_right", "def getNumChildren(self): return self.getNumEntries() def setParent(self, node): self.parent = node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node", "= RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry = RTreeEntry(mbr, node)", "= RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6, (110, 200, 100), point6) node6", "x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = []", "[x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] keep_nodes = [x for", "[node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list = [] if node.getNumChildren() ==", "= None, is_supernode = False): self.parent = parent self.is_leaf = is_leaf self.m =", "if curr_entry != entry: 
curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2 = RTreeEntry(curr_overall_mbr2,", "@staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() == None: entry = tree.getRootEntry()", "in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x) for x in", "in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in upper_mbr_pairs] upper_margin_value_sum =", "curr_mbr_area item = curr_entry pair = (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False: if", "rectangles never have negative area for i in xrange(mbr_a.getDimension()): # a \"left\" comp_a1", "\"\"\" # assume item is in tree # returns a node, which can", "== True else 1) * root_mbr_area # min-pq priority = (first_priority_component, second_priority_component) #", "curr_entries = node.getEntries() entry = node.getParent().retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries]", "\")\" return overall_str def toLeafStatusString(self): root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node):", "== True: # print \"supernode encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise", "(3, 10, 10), (1, 10, 10), (3, 10, 10)] # for i in", "not acyclic and we have cliques # note that we don't necessarily need", "== None: continue else: return curr_node return None \"\"\" # a little stilted", "= RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4, (110, 200, 100), point4) node4", "= tuple(max_components) result_mbr_list = base_mbr.getMBRList() + [mbr] mbr = CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return", "== True: first_entry, second_entry = 
resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True", "sec. for pypy with m = 8 and M = 16 # n", "if (node.getParent() == None or (node.getParent() != None and node in node.getParent().getChildren())) ==", "[x[i] for x in points] min_comp_value = min(components) max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value)", "self.getRootEntry(), partial_result, without_borders) return partial_result def doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders): if entry.getMBR().isRaw()", "= item node = entry.getChild() mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr) == False and", "<= comp_b2 and comp_a2 >= comp_b1 if do_overlap == False: break return do_overlap", "= self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry, follow) split_status, added_nodes = result curr_entry =", "\"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3 * 0.8 # offset = (768", "# print \"hello\" did_find_leaf = self.findLeaf(entry) child_node = entry.getChild() # root node never", "has only one child (and it is not a leaf) # remove the", "# print \"underfull\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode() for this,", "\"-\" if node.getNumEntries() != 0 else str(depth) overall_str_list = [curr_depth] else: overall_str_list =", "entry.getMBR().doesEnclose(mbr) == True: return True else: entries = entry.getChild().getEntries() for curr_entry in entries:", "curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2 =", "in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ : ] low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i]) low_distributions", "# wish to insert using order of high-level to 
low-level # Q =", "= [] for curr_entry in entries: # set priority correctly and add to", "= [x[0] for x in overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x for", "next_y2)) image.write(\"tree.png\") def main(): point1 = (30, 100, 0) point2 = (40, 100,", "def chooseSubtree(self, entry, node): entries = node.getEntries() candidate_entries = None # if node.isLeafNode()", "next_next_candidates = [x[1] for x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions =", "and # high inter-group overlap means maximal disjointedness # is not going to", "[] for entry in entries: child = entry.getChild() child_str = self.toNumChildrenStringHelper(child) curr_str =", "have parent set correctly for a leaf, # which is not the case", "priority = (first_priority_component, second_priority_component) priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item = curr_entry", "= (40, 40) elif i % 4 == 2: upper_left = (60, 60)", "in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) is_leaf_node = self.getNumChildren() == 0", "Point: def __init__(self, vec, id_value): self.vec = vec self.id_value = id_value @staticmethod def", "2)) ** (1 / 3.0) / denominator)) # for n = 10000 #", "# leaf is a non-traditional leaf leaf_node = child_node.getParent() if entry != self.getRootEntry()", "def setMBR(self, mbr): self.mbr = mbr def getChild(self): return self.child def setChild(self, node):", "M - 2 * m + 2 + 1)] window_left_sizes = [x for", "node.getChildren() have_node_str = True is_root_node = node == self.getRootEntry().getChild() if is_root_node == True:", "tuple(max_components) result_mbr_list = base_mbr.getMBRList() + [mbr] mbr = CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return mbr", 
"None) node = RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) \"\"\"", "root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if node == None: return", "= mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose = left_value1 <= left_value2", "result_entry_list, ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque = deque() # while len(heap) != 0:", "None) dimension = None result1 = self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension = result1", "entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr) # this parent-setting step is crucial #", "3.0) / denominator)) # for n = 10000 # k = int(round((20000 *", "def rstarSplitNodeHelper(self, node, E_overall, entry): # prev_leaf_status = node.isLeafNode() prev_leaf_status = None curr_node", "that contains E is found # remove E from L # call algorithm", "child pass def condenseTree(self, leaf_node): Q = [] self.condenseTreeHelper(leaf_node, Q) # Q is", "entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7, (110, 200, 100), point7)", "comp_b1 else: do_overlap = do_overlap and comp_a1 <= comp_b2 and comp_a2 >= comp_b1", "+ string.join(overall_str_list, \" \") + \")\" return overall_str def toLeafStatusString(self): root = self.getRootEntry().getChild()", "mbr_list]) for curr_mbr in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node = RTreeNode(None,", "close_ancestor_entry.getMBR().toString() # raise Exception() # for entry in entries[0 : 15]: for entry", "= [x[0] for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for", "consistent # 
we never split a super-node # updated on 2016-08-23 to fix", "entry): if node.getSplitHistoryRootDimension() == None: return (None, None, None, True) else: m =", "= \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries,", "200, 100), point5) node5 = RTreeNode(None, [], True) entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5)", "# fixed bug with parent pointers for xtreeInsert(); # have supernode demotion when", "print len(result) for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\",", "100, 0)) curr_mbr2 = RawMBR((50, 100, 0), (50, 100, 0), point3) curr_mbr2b =", "elif i % 4 == 2: upper_left = (60, 60) lower_right = (80,", "== min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self,", "low-level # Q = list(set(Q)) Q.reverse() for curr_node in Q: curr_entry = curr_node.getEntry()", "candidates on occasion, # if containment query for conflict x-tree returns entries matching_entries", "result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list:", "set of actual rectangles for an r-tree; # takes O(n * log(n)) time", "def toEntriesArePresentStringHelper(self, node): if node == None: return \"\" entries = node.getEntries() children", "!= 0: (priority,item) = heapq.heappop(heap) elif len(internal_node_stack_deque) != 0: item = internal_node_stack_deque.popleft() #", "M of two works import sys # import PythonMagick import heapq from collections", "True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) \"\"\" for i in xrange(10): upper_left", "lower_right_matches == True return result class CompositeMBR(MBR): 
def __init__(self, upper_left, lower_right, mbr_list): MBR.__init__(self,", "0: item = internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap) entry = item node =", "RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4, (110, 200, 100), point4) node4 =", "= x2 - x1 return margin if self.getDimension() == 2: x1, y1 =", "component_mbr_list) return result_mbr class HyperRectangle: def __init__(self, upper_left, lower_right, id_value): self.upper_left = upper_left", "= time.time() time_diff = time2 - time1 print \"time difference:\", time_diff, \"seconds\" #", "= enlarged_mbr.getArea() area_change = enlarged_mbr_area - base_mbr_area return area_change @staticmethod def doOverlap(mbr_a, mbr_b,", "0, 65535, 32767) elif color_choice == 2: color = PythonMagick.Color(0, 65535, 0, 32767)", "entry = None, split_history_root_dimension = None, is_supernode = False): self.parent = parent self.is_leaf", "upper_left_b[i] comp_b2 = lower_right_b[i] side = max(0, min(comp_a2, comp_b2) - max(comp_a1, comp_b1)) sides.append(side)", "these times involve n, which is number of actual rectangles # or leaves", "y in x[1]]), x) for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1])", "is in tree # returns a node, which can be None if no", "entry2) @staticmethod def rstarPreadjustTree(self, leaf_node): node = leaf_node parent = node.getParent() if parent", "10, 10), (6, 10, 10), (9, 10, 10), (3, 10, 10), (1, 10,", "retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries()) class RTreeEntry: def __init__(self, mbr,", "in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent = node.getParent()", "entry): leaf_node = 
self.rstarChooseLeaf(entry) adjust_result = None if leaf_node.isFull() == False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node)", "True: if (parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry],", "pypy with m = 8 and M = 16 # these numbers are", "entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print", "finding that takes O(log(n)) time on average # for start rectangle taken from", "1528x slower) # n = 2000 # n = 1000 # n =", "# k = int(round((20000 * math.log(20000, 2)) ** (1 / 3.0) / denominator))", "619, 1521)), \\ ((798, 928, 1028), (1762, 1795, 1309)), \\ ((225, 359, 290),", "tree.insert(entry5) mbr6 = RawMBR(point6, (110, 200, 100), point6) node6 = RTreeNode(None, [], True)", "average # for start rectangle taken from set of actual rectangles # for", "to fix overlap logic for determining when to attempt an overlap-minimal split #", "-1 * curr_mbr_area item = curr_entry pair = (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() ==", "= [x for x in window_left_sizes if x <= M and x >=", "True: if entry.getMBR().doesEnclose(mbr) == True: return True else: entries = entry.getChild().getEntries() for curr_entry", "entry_group2, axis) return next_result def xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension() == None: return", "node.isLeafNode() == False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list = [] for", "node_list def getNodesHelper(self, node, partial_result): partial_result.append(node) for curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def", "tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values 
= [x for x in tagged_enlargement_values if x[0]", "if we made it this far, we should add to conflict x-tree result_entry_list.append(entry)", "* 0.8 offset = (1536 * 0.2) / 2 next_x = multiplier *", "curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def doEnclosureQueryWithEarlyStopping(self, mbr):", "# if node.getNumChildren() == 0: # if node.isNonTraditionalLeafNode() == True: if node.isLeafNode() ==", "entry, node): entries = node.getEntries() candidate_entries = None # if node.isLeafNode() == True:", "min_S_value_d_S_pair_candidates = [x for x in d_S_pairs if x[1] == min_S_value] chosen_d_S_pair =", "next_x1 center_y = next_y1 radius = 4 perimeter_x = next_x1 perimeter_y = next_y1", "= [] def push(self, item, priority): pair = (priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item)", "= base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change = enlarged_mbr_area -", "item node = entry.getChild() mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr)", "print tree.toString() tree2 = RTree() import random entries = [] # lower_rights =", "and x[1] >= m] for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ : ]", "result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result next_result = (entry_group1,", "10), (1, 10, 10), (8, 10, 10), (6, 10, 10), (9, 10, 10),", "not a leaf) # remove the root # set as new root its", "entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self,", "((808, 926, 151), (889, 1755, 320)), \\ ((945, 260, 1091), (1932, 332, 1133)),", "parent = node.getParent() if parent != None: curr_entries = node.getEntries() entry = node.getParent().retrieveEntryForChild(node)", 
"x[0] >= m and x[1] <= M and x[1] >= m] for i", "in case # print \"split\" return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry)", "= {} for curr_entry in entries: curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension", "negative area for i in xrange(mbr_a.getDimension()): # a \"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i])", "x in mbr_list] points = upper_left_points + lower_right_points min_components = [] max_components =", "(1314, 717, 1487)), \\ ((660, 268, 962), (1293, 619, 1521)), \\ ((798, 928,", "def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for x in self.getNodes() if x.getEntry().getMBR().isRaw() == True]", "image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3 * 0.8 # offset = (768 *", "False: break return do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b) == False:", "= None if have_node_str == True: curr_leaf_status = \"-\" if (node.getParent() == None", "entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry], False) else: split_result = self.rstarSplitNode(leaf_node, entry) l,", "curr_entry in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for", "0) point7 = (90, 100, 0) point8 = (110, 100, 0) curr_mbr1 =", "getMaximumNumEntriesPerNode(self): return self.M def isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries()", "return area @staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr, mbr] upper_left_points = [x.getUpperLeft()", "mbr_b): if MBR.doOverlap(mbr_a, mbr_b) == False: return 0 else: upper_left_a = mbr_a.getUpperLeft() lower_right_a", "str(list(upper_left + lower_right) + [self.isRaw()]) 
return result def getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self,", "[] def push(self, item, priority): pair = (priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item) =", "y = location multiplier = 1 / (1.0 * 6.5) * 0.8 offset", "is_supernode = False): self.parent = parent self.is_leaf = is_leaf self.m = 8 self.M", "[node]) elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() == 0: pass", "parent != None: curr_entries = node.getEntries() entry = node.getParent().retrieveEntryForChild(node) children = [x.getChild() for", "isLeafNode() distinction # updated on 2016-08-25 to fix overlap logic for determining when", "overlap_ratio = 1 else: overlap_ratio = 0 else: overlap_ratio = overlap_area / (1.0", "entry_group1, entry_group2 = result parent = curr_node.getParent() \"\"\" if parent != None and", "= PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry() entries = [root_entry]", "string.join(overall_str_list, \" \") + \")\" return overall_str def toLeafStatusString(self): root = self.getRootEntry().getChild() return", "0: (priority,item) = heapq.heappop(heap) elif len(internal_node_stack_deque) != 0: item = internal_node_stack_deque.popleft() # (priority,item)", "next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum / (1.0 * union_area)", "deque() # while len(heap) != 0: while len(internal_node_stack_deque) != 0 or len(heap) !=", "= CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node, entry): curr_node = node E_overall = list(set(curr_node.getEntries()", "1: # raise Exception() node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in", "0.2) / 2 
offset = (1536 * 0.2) / 2 x1 = 0", "strong running time estimates; the reason is that # otherwise the directed graph", "axis) return next_result def xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension() == None: return (None,", "in tree # returns a node, which can be None if no match", "RTreeNode(None, [], True) entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4, (110,", "return next_result def xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension() == None: return (None, None,", "if parent != None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node != self.getRootEntry().getChild(): parent.addEntry(entry1)", "modify adjustTree(); # stop at root instead of non-existent parent of root; #", "peek(self): heap = self.heap pair = heap[0] result = pair return result def", "((911, 953, 196), (1776, 1662, 455)), \\ ((596, 892, 131), (1543, 1838, 669)),", "root = self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self, node): if node == None: return", "upper_left, lower_right) self.mbr_list = mbr_list def getMBRList(self): return self.mbr_list def isComposite(self): return True", "= False): partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders) return partial_result def doOverlapQueryHelper(self,", "for keep_node in keep_nodes: Q.append(keep_node) # only makes sense to speak of modifying", "= int(round(denominator / denominator)) # for n = 100 # k = int(round((1000", "(1877, 744, 791)), \\ ((1081, 1056, 1020), (1708, 1075, 1542)), \\ ((358, 815,", "return self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node):", "Q) # Q is in order of low-level to high-level; # wish to", "and comp_a1 < comp_b2 and comp_a2 
> comp_b1 else: do_overlap = do_overlap and", "x1) + 2 * (y2 - y1) return margin surface_area = 0 for", "0), HyperRectangle((50, 50, 0), (100, 100, 0), 1)) tree = RTree() print tree.toString()", "getNodesForNode(self, node): node_list = [] self.getNodesHelper(node, node_list) return node_list \"\"\" def getUnionArea(self): pass", "ee], True) ended_with_split2, resulting_entries_from_split = adjust_result if ended_with_split2 == True: e, ee =", "1,000 works in 2.996 sec. for pypy with m = 2 and M", "[([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]) for x in", "window_size_pairs if x[0] <= M and x[0] >= m and x[1] <= M", "int(y1 + random.random() * 100) # z2 = int(z1 + random.random() * 100)", "perimeter_x = next_x + offset perimeter_y = next_y + offset + radius image.draw(PythonMagick.DrawableCircle(center_x,", "= node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result next_result", "entry, this should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root = self.getRootEntry().getChild() \"\"\" if", "in entries: # if len(tree.getNodes()) != 0: # print \"removing entry with mbr:\",", "1378)), \\ ((911, 953, 196), (1776, 1662, 455)), \\ ((596, 892, 131), (1543,", "tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print tree.toString() tree2 = RTree() import", "min(enlargement_values) candidate_tagged_enlargement_values = [x for x in tagged_enlargement_values if x[0] == min_enlargement_value] candidate_entries", "next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result = self.findLeafHelper(entry, next_entry) if", "tree.getAllRectangleCloseAncestors() print result print len(result) for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair", "None) node = RTreeNode(None, [], 
True) entry = RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry)", "leaf nodes # keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() ==", "entries = node.getEntries() mbr_list = [x.getMBR() for x in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list)", "= [] for entry in entries: child = entry.getChild() child_str = self.toNumChildrenStringHelper(child) curr_str", "self.setRoot(chosen_child) \"\"\" # if RN is a leaf node # search all entries", "curr_mbr.getArea() first_priority_component = 0 if curr_mbr_is_contained == True else 1 second_priority_component = (-1", "def toEntriesArePresentString(self): root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if node ==", "196), (1776, 1662, 455)), \\ ((596, 892, 131), (1543, 1838, 669)), \\ ((879,", "!= None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node != self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent)", "if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore node if enclosing mbr exists in conflict", "def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() == None and self.getNumChildren() == 0) or (self.getNumChildren()", "m): result_list = [] if len(entries) > (M + 1): raise Exception() window_left_sizes", "= [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for", "True: if node.isLeafNode() == True and node == self.getRootEntry().getChild(): node.addEntry(entry) curr_node = entry.getChild()", "for x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x) for", "entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) \"\"\" for i in xrange(10): 
upper_left =", "upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension", "raise Exception() conflict_x_tree.delete(matching_entry) # if node is a leaf node, it has an", "have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node == None: return (False, []) else: parent = node.getParent()", "1091), (1932, 332, 1133)), \\ ((262, 221, 872), (500, 279, 1521)), \\ ((332,", "# stop at root instead of non-existent parent of root; # also, we", "= None, split_history_root_dimension = None, is_supernode = False): self.parent = parent self.is_leaf =", "# entries.append(entry) \"\"\" # for entry in entries[0 : 4]: # for entry", "!= entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2 = RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1)", "z2) upper_left = (x, y) lower_right = (x, y) # upper_left = ul_lr_pairs[i][0]", "= node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries,", "root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component = 0 if root_mbr_is_contained ==", "node.getEntries()] curr_x_tree = RTree() overlap_area_sum = sum([x.getArea() for x in mbr_list]) for curr_mbr", "queue curr_node = curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr)", "None result1 = self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension = result1 mbr_collection1 = [x.getMBR()", "add all-start-rectangles close-ancestor finding, # which for a well-formed r-tree, takes O(n *", "node.setToSuperNode(False) elif node.isUnderfull() == True: \"\"\" if 
node.isUnderfull() == True: # print \"underfull\"", "# b \"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i]) # b \"right\" comp_b2 = max(upper_left_b[i],", "# is not going to be good enough to cut down branches explored;", "# n = 10,000 works in 1 min. 54 sec. for pypy with", "/ 3.0) / denominator)) # for n = 14500 # x1 = int(100", "for start rectangle # taken from set of actual rectangles for an r-tree;", "\")\" return overall_str def toEntriesArePresentString(self): root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node):", "RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent != None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if", "add single-start-rectangle-based # close-descendant finding that takes O(log(n)) time on average # for", "(priority,item) = heapq.heappop(self.heap) return item def isEmpty(self): return len(self.heap) == 0 def peek(self):", "= 5500 # k = int(round((10000 * math.log(10000, 2)) ** (1 / 3.0)", "next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return (node1, node2, entry1, entry2) @staticmethod def rstarPreadjustTree(self, leaf_node):", "sides.append(side) area = reduce(lambda x, y: x * y, sides) return area @staticmethod", "growth; expected 1x slower) # n = 1000 # 1.1649 seconds (~2.72x slower", "upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr = CompositeMBR(upper_left_point, lower_right_point, component_mbr_list) return result_mbr", "since we don't need a O(log(n)) time operation # to find the entry", "growth; expected 664x slower) # n = 14500 # 170.053 seconds (~398x slower", "return mbr @staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr)", "__init__(self, upper_left, lower_right, 
contained_item): MBR.__init__(self, upper_left, lower_right) self.contained_item = contained_item def isRaw(self): return", "# assume item is in tree # returns a node, which can be", "one child (and it is not a leaf) # remove the root #", "\"seconds\" # raise Exception() for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print", "slower) # n = 10000 # 84.222 seconds (~197x slower for 100x growth;", "def xtreeInsertHelper(self, entry, node): split_status = None next_mbr = None if True: #", "upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list = base_mbr.getMBRList() + [mbr] mbr =", "# return a list of entries def getRectangleCloseDescendants(self, reference_entry): # repeatedly pop nodes,", "x in self.getEntries()]) is_leaf_node = self.getNumChildren() == 0 return is_leaf_node def addEntry(self, entry):", "image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry() entries =", "curr_leaf_status = str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry in", "1248, 1035)), \\ ((920, 974, 724), (1802, 1524, 1378)), \\ ((911, 953, 196),", "parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode() for this, as internal nodes", "implied by the r-tree # is not acyclic and we have cliques #", "mbr def getChild(self): return self.child def setChild(self, node): self.child = node @staticmethod def", "node.isLeafNode() == False: # if we made it this far, we should add", "self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return 
start_rectangle_to_close_ancestor_entries_dict def draw(self): # im =", "adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node == None: return (False, []) else:", "self.M = 16 self.child_to_entry_dict = {} for curr_entry in entries: curr_child = curr_entry.getChild()", "curr_entry = node.getEntry() curr_mbr = curr_entry.getMBR() mbr = entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr)", "node # search all entries of RN to find E.mbr # else: #", "!= None and (node in parent.getChildren()): pass \"\"\" node1 = RTreeNode(parent, entry_group1, prev_leaf_status)", "in entries[0 : 15]: for entry in entries: # if len(tree.getNodes()) != 0:", "(M + 1): raise Exception() window_left_sizes = [m - 1 + k for", "@staticmethod def rstarChooseSplitIndex(entries, axis, M, m): result = RTree.rstarGenDistributions(entries, M, m) candidate_distributions =", "numbers are for upper-left's in (100, 10100) and # lower-right's in (ul_i, ul_i", "partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doContainmentQueryHelper(self, mbr, entry, partial_result):", "\"\"\" if parent != None and (node in parent.getChildren()): pass \"\"\" node1 =", "M and x[1] >= m] for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ :", "point2): x1, y1 = point1 x2, y2 = point2 change_x = x2 -", "getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict) def", "i in xrange(component_mbr_list[0].getDimension()): components = [x[i] for x in points] min_comp_value = min(components)", "]) for j in xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod", "x in partner_entries] 
partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False: if have_resulting_second_entry_from_split", "node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() == None: entry = tree.getRootEntry() curr_entries = entry.getChild().getEntries()", "node == self.getRootEntry().getChild() if is_root_node == True: have_node_str = True overall_str_list = None", "node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in node.getEntries()] curr_x_tree = RTree()", "node, entries): if node.isSuperNode() == False: node.setToSuperNode(True) # questionable if this is really", "self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall,", "ee = split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(),", "matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry) # if node is a leaf node, it", "False and reference_mbr.doesEnclose(curr_mbr) == False: continue # item = curr_entry # internal_node_stack_deque.appendleft(item) priority", "def toNumChildrenString(self): root = self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if node ==", "taken from set of actual rectangles # for an r-tree and O(n *", "84.222 seconds (~197x slower for 100x growth; expected 664x slower) # n =", "entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if", "node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if 
self.getRootEntry().getChild() == node: next_root_entry = RTreeEntry(next_mbr, None) next_root = RTreeNode(None,", "getMBRList(self): return self.mbr_list def isComposite(self): return True @staticmethod def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft()", "ul_i + 10000) # two strange things going on - saturation occurs #", "range(1, M - 2 * m + 2 + 1)] window_left_sizes = [x", "root_mbr = CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self):", "in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: # raise Exception() # print", "children to priority queue, # ignore if contained rectangle is contained by a", "= lower_right_b[i] side = max(0, min(comp_a2, comp_b2) - max(comp_a1, comp_b1)) sides.append(side) intersection_volume =", "if (parent.getNumChildren() + 2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent,", "root_entry = self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self, entries, image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\")", "partial_result, without_borders): if entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True: partial_result.append(entry)", "was_successful == True: mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR()", "if contained rectangle is contained by a rectangle in conflict x-tree, # add", "node2 = RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in", "y, z) # lower_right = lower_rights[i] mbr = RawMBR(upper_left, lower_right, None) node =", "no match is found # finds one match if such a node exists", 
"entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry,", "upper_left, lower_right) self.contained_item = contained_item def isRaw(self): return True @staticmethod def makeMBRFromPoint(point): upper_left", "O(log(n)) time operation # to find the entry containing node; just look at", "0) im.save(\"tree.png\", \"PNG\") \"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\")", "= self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self, entries, image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4)", "entry def isSuperNode(self): return self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode = is_supernode def getSplitHistoryRootDimension(self):", "100), point5) node5 = RTreeNode(None, [], True) entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5)", "of entries def getRectangleCloseDescendants(self, reference_entry): # repeatedly pop nodes, prune using enclosure/containment #", "= upper_left x2, y2 = lower_right multiplier = 1 / (1.0 * 6.5)", "return candidate_entries def resolveEnlargementTie(self, entries, entry): mbr = entry.getMBR() tagged_mbr_list = [] for", "mbr:\", entry.getMBR().toString() # print \"tree, currently:\", tree.toString() # tree2.delete(entry) pass # print tree.toString()", "parent = node.getParent() curr_entries = node.getEntries() entry = parent.retrieveEntryForChild(node) children = [x.getChild() for", "= MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change = enlarged_mbr_area - base_mbr_area return area_change", "# else: # RN is an internal node # find all entries of", "== None: entry = tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node) children = [x.getChild() for", "= mbr_a.getLowerRight() 
upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension = mbr_a.getDimension() sides =", "root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry = self.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild()", "= RawMBR(upper_left, lower_right, contained_item) return mbr def doesMatch(self, mbr): upper_left_matches = self.getUpperLeft() ==", "E from L # call algorithm condenseTree(L) # if the root has only", "getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque = deque() # while", "True: # split just in case # print \"split\" return (RTree.SPLIT, [node]) elif", "n = 5500 # 23.899 seconds (~55.96x slower for 55x growth; expected 317x", "lower_right1 = self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal = upper_left1 ==", "== True: # ignore node if enclosing mbr exists in conflict x-tree continue", "if (parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False)", "be consistent # we never split a super-node # updated on 2016-08-23 to", "node).getChild() result = self.xtreeInsertHelper(entry, follow) split_status, added_nodes = result curr_entry = node.getEntry() curr_mbr", "= RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent != None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry)", "= sum([x.getArea() for x in mbr_list]) for curr_mbr in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(),", "(40, 100, 0) point3 = (50, 100, 0) point4 = (60, 100, 0)", "# upper_left = ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1] # x = int(random.randint(1, 100))", "self.getEntries()]) return 
is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() == 0 return is_traditional_leaf_node", "(~197x slower for 100x growth; expected 664x slower) # n = 14500 #", "else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\"", "= second_entry if have_resulting_second_entry_from_split == True and is_first_call_after_first_pass != True: partner_node = partner_entry.getChild()", "CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node, entry): curr_node = node E_overall = list(set(curr_node.getEntries() +", "entries = node.getEntries() mbr_list = [entry.getMBR()] for mbr in mbr_list: upper_left = mbr.getUpperLeft()", "0: pass color_choice = depth % 3 color = None if color_choice ==", "tagged_mbr_list = [] for curr_entry in entries: base_mbr = curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr,", "next_x = multiplier * x next_y = multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\") center_x", "reference_entry.getMBR() root_entry = self.getRootEntry() root_node = root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw()", "i in xrange(4): \"\"\" ul_lr_pairs = [((797, 989, 602), (910, 1248, 1035)), \\", "* term2 surface_area += term margin = surface_area return margin def toString(self): upper_left", "area_values = [x[0] for x in tagged_area_values] min_area = min(area_values) candidate_tagged_area_values = [x", "set priority correctly and add to priority queue curr_node = curr_entry.getChild() curr_mbr =", "= [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None", "(entry_group1, entry_group2, axis, False) return next_result def xtreeSupernodeInsert(self, node, entries): if node.isSuperNode() ==", 
"= self.toEntriesArePresentStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \")", "max(upper_left_a[i], lower_right_a[i]) # b \"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i]) # b \"right\" comp_b2", "time_diff, \"seconds\" # raise Exception() for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair", "= self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self, node): if node == None: return \"\"", "in xrange(base_mbr.getDimension()): components = [x[i] for x in points] min_comp_value = min(components) max_comp_value", "return does_enclose def isEqualTo(self, mbr): upper_left1 = self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2 =", "at worst; # and to add all-start-rectangles close-ancestor finding, # which for a", "node E_overall = list(set(curr_node.getEntries() + [entry])) return self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self, node,", "parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception() if node.getNumChildren() <= 1: # raise Exception() node.setToSuperNode(False)", "@staticmethod def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node == None: return (False,", "[(MBR.findOverlapArea(x, mbr), x) for x in mbr_list] enlargement_values = [x[0] for x in", "nodes always point to same entries # unless we explicitly create new entries,", "# tree2.delete(entry) pass # print tree.toString() result = tree.getRectangleCloseDescendants(entry8) print result result =", "# entry_pq = PriorityQueue() heap = [] # entry_pq.push(root_entry, priority) item = root_entry", "# y = 10 # z = 10 # lower_right = (x, y,", "l, ll, e, ee = split_result resulting_entries_from_split = [e, ee] next_root = RTreeNode(None,", 
"string.join(overall_str_list, \" \") + \")\" return overall_str def toDepthString(self): root = self.getRootEntry().getChild() return", "window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] upper_sorted_entries = entries[ : ]", "class HyperRectangle: def __init__(self, upper_left, lower_right, id_value): self.upper_left = upper_left self.lower_right = lower_right", "if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: return True else: entries =", "True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild() for x in", "isComposite(self): return True @staticmethod def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for x in component_mbr_list]", "= RawMBR((50, 50, 0), (100, 100, 0), HyperRectangle((50, 50, 0), (100, 100, 0),", "enclose reference mbr # and associated mbr is not contained within reference mbr", "# k = int(round((1000 * math.log(1000, 2)) ** (1 / 3.0) / denominator))", "m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result =", "print \"split\" return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\" elif", "= mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension = mbr_a.getDimension() sides = [] for i", "= left_value1 <= left_value2 and right_value1 >= right_value2 if component_does_enclose == False: does_enclose", "for x in mbr_list] enlargement_values = [x[0] for x in tagged_enlargement_values] min_enlargement_value =", "in x[0]], [y.getMBR() for y in x[1]]), x) for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions", "for curr_entry in entry_collection2: curr_entry.getChild().setParent(node2) 
node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() == node: next_root_entry =", "curr_entry): \"\"\" if node.isLeafNode() == False: curr_mbr = entry.getMBR() entries = self.getEntries() tagged_mbr_list", "area2 = mbr2.getArea() union_area = area1 + area2 - overlap_area ovelap_ratio = None", "low-level to high-level; # wish to insert using order of high-level to low-level", "min(components) max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list", "low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue()", "depth + 1) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \"", "are unique for close-descendant # and close-ancestor finding; the assumption is necessary #", "/ 3.0) / denominator)) # for n = 5500 # k = int(round((10000", "M and (len(entries) - x) >= m] window_size_pairs = [(window_left_sizes[i], len(entries) - window_left_sizes[i])", "entries: # set priority correctly and add to priority queue curr_node = curr_entry.getChild()", "= mbr_list def getMBRList(self): return self.mbr_list def isComposite(self): return True @staticmethod def makeMBR(component_mbr_list):", "in tagged_mbr_list] area_values = [x[0] for x in tagged_area_values] min_area = min(area_values) candidate_tagged_area_values", "which is not the case when we initially insert parent = node.getParent() entry1", "(1 / 3.0) / denominator)) # for n = 1000 # k =", "while curr_node.isLeafNode() == False: curr_node = curr_node.getChildren()[0] depth = depth + 1 return", "True: curr_leaf_status = str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry", 
"curr_node = entry.getChild() node.addEntry(entry) # needed this curr_node.setParent(node) \"\"\" entries = node.getEntries() mbr_list", "return mbr.getUpperLeft() def getVec(self): return self.vec def getComponent(self, d): return self.getVec()[d] def getIDValue(self):", "= root depth = 0 while curr_node.isLeafNode() == False: curr_node = curr_node.getChildren()[0] depth", "(False, None, None, dimension) else: return (True, entry_collection3, entry_collection4, dimension) else: return (True,", "lower_right = (x2, y2, z2) upper_left = (x, y) lower_right = (x, y)", "matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y", "(RTree.NO_SPLIT, [node]) def rstarInsert(self, entry): leaf_node = self.rstarChooseLeaf(entry) adjust_result = None if leaf_node.isFull()", "ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1] # x = int(random.randint(1, 100)) # y =", "= None if have_node_str == True: curr_leaf_status = \"-\" if node.isLeafNode() == False", "overall_str_list = None if have_node_str == True: curr_leaf_status = \"-\" if (node.getParent() ==", "removing this entry, this should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root = self.getRootEntry().getChild()", "result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for y", "for i in xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i]", "upper_left1 == upper_left2 and lower_right1 == lower_right2 return is_equal class RawMBR(MBR): def __init__(self,", "None) node2 = RTreeNode(parent, entry_collection2, None, entry2) entry2.setChild(node2) for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1)", "S_comp_dict = {} for i in 
xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions = result[i] S_comp_value =", "priority = -1 * curr_mbr_area item = curr_entry pair = (priority,item) heapq.heappush(heap,pair) elif", "# where n is number of actual rectangles or leaves; # assumes that", "rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() == None: entry = tree.getRootEntry() curr_entries =", "= RTree.rstarGenDistributions(entries, M, m) S_comp_dict = {} for i in xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions", "* (x2 - x1) + 2 * (y2 - y1) return margin surface_area", "else: entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x", "# z = 10 # lower_right = (x, y, z) # lower_right =", "mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q)", ": ]) for j in xrange(len(window_left_sizes))] upper_sorted_entries = entries[ : ] upper_sorted_entries.sort(key =", "= None if have_resulting_second_entry_from_split == True: first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry", "in entries[0 : 4]: # for entry in entries[0 : 15]: for entry", "conflict x-tree, # use as priority (prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0: return", "\"white\") \"\"\" im = Image.new(\"RGB\", (768, 768), \"white\") draw = ImageDraw.Draw(im) root =", "self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose = left_value1", "curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1: next_curr_node 
= curr_entry.getChild()", "tree.toString() tree2 = RTree() import random entries = [] # lower_rights = [(3,", "lower_right_point, component_mbr_list) return result_mbr class HyperRectangle: def __init__(self, upper_left, lower_right, id_value): self.upper_left =", "upper_left = (x1, y1, z1) # lower_right = (x2, y2, z2) upper_left =", "= chosen_entry.getChild() return chosen_entry def xtreeInsert(self, entry): # print \"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild())", "2)) ** (1 / 3.0) k = 1 # k = int(round(denominator /", "entries # unless we explicitly create new entries, # which we do do", "min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for x in combined_area_tagged_next_candidate_distributions if x[0] ==", "if node != self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root = RTreeNode(None, [entry1,", "overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toDepthString(self):", "is really necessary for entry in entries: curr_node = entry.getChild() node.addEntry(entry) # needed", "def condenseTreeHelper(self, node, Q): # demote super-node if necessary if node.isSuperNode() == True", "second_priority_component = (-1 if root_mbr_is_contained == True else 1) * root_mbr_area # min-pq", "entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3, (110, 200, 100), point3)", "print entry_pq # raise Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry) return", "graph implied by the r-tree # is not acyclic and we have cliques", "# dimension is implicit (determined using points sampled) and assumed to be consistent", "matches the ignore entry continue if node.isLeafNode() == True: # could have a", "= entry.getMBR() next_mbr = 
MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr) # this parent-setting step is crucial", "rectangle; # check explicitly for this case if reference_mbr.doesEnclose(mbr) == False: continue #", "None next_mbr = None if True: # if node.getNumChildren() == 0 and node", "== True else 1 second_priority_component = (-1 if curr_mbr_is_contained == True else 1)", "a well-formed r-tree, takes O(n * log(n)) time; # these times involve n,", "tree.delete(entry8) # tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\"", "if MBR.doOverlap(mbr_a, mbr_b) == False: return 0 else: upper_left_a = mbr_a.getUpperLeft() lower_right_a =", "(self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries() def setParent(self, node): self.parent", "# returns entries def doEnclosureQuery(self, mbr): partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return", "return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if node == None: return \"\" entries =", "self.condenseTreeHelper(node.getParent(), Q) return # not tested # returns entries # does intersection query", "e, ee = split_result adjust_result = RTree.rstarAdjustTree(self, l, [e, ee], True) ended_with_split2, resulting_entries_from_split", "== 0 else [node.getEntry().getMBR().toString()] # overall_str_list = [] if node.getNumChildren() == 0 else", "lower_right_point = tuple(max_components) result_mbr = CompositeMBR(upper_left_point, lower_right_point, component_mbr_list) return result_mbr class HyperRectangle: def", "using points sampled) and assumed to be consistent # we never split a", "MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for 
curr_entry in", "for a delete\") # if parent has zero entries after removing this entry,", "[(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] curr_tuple = (low_distributions,", "in entry_group2: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 =", "(1.0 * 6.5) * 0.8 offset = (1536 * 0.2) / 2 next_x", "True: e, ee = resulting_entries_from_split l = e.getChild() ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries()", "/ (1.0 * 6.5) * 0.8 offset = (1536 * 0.2) / 2", "next_x2, next_y2)) image.write(\"tree.png\") def main(): point1 = (30, 100, 0) point2 = (40,", "False: # this is idempotent for added_node in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status", "entries[0 : 4]: # print \"supernodes:\", [x for x in tree.getNodes() if x.isSuperNode()", "return \"\" entries = node.getEntries() children = node.getChildren() have_node_str = True is_root_node =", "counter saturation, domain has to grow with n # n = 100 #", "curr_entry = entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list = [x.getMBR() for", "do_overlap and comp_a1 < comp_b2 and comp_a2 > comp_b1 else: do_overlap = do_overlap", "lower_right = (x, y) # upper_left = ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1] #", "0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if node == None: return elif node.isLeafNode()", "= RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea()", "# for entry in entries[0 : 15]: for entry in entries: # if", "entry == curr_entry: return True else: return False else: entries = curr_entry.getChild().getEntries() for", "= RTreeNode(None, [entry1, entry2], False) 
self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return (node1, node2,", "= overlap_area_sum / (1.0 * union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull()", "a node to be found for a delete\") # if parent has zero", "# 230.0411 seconds (~538x slower for 200x growth; expected 1528x slower) # n", "image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if len(entries) == 0: parent = entry.getChild().getParent() mbr", "self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e, ee =", "best-first priority queue for leaf nodes # updated on 2016-11-16 to fix margin", "offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children = [x.getChild() for x in", "mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split,", "overall_str_list = None if have_node_str == True: curr_leaf_status = str(node.getNumChildren()) overall_str_list = [curr_leaf_status]", "window_left_sizes if x <= M and x >= m and (len(entries) - x)", "self.xtreeSplitNode(node, entry) was_successful, entry_collection1, entry_collection2, dimension = split_result if was_successful == True: mbr_collection1", "children = [x.getChild() for x in entries] entry.draw(tree, entries, image, depth + 1)", "def getIDValue(self): return self.id_value class Point: def __init__(self, vec, id_value): self.vec = vec", "leaf node, it has an actual rectangle # decide whether to include associated", "return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE = 1 
NO_SPLIT = 2 def", "result == True: return True return False # returns entries def doContainmentQuery(self, mbr):", "resulting_entries_from_split = adjust_result if ended_with_split2 == True: e, ee = resulting_entries_from_split l =", "lower_right = self.getLowerRight() result = str(list(upper_left + lower_right) + [self.isRaw()]) return result def", "crucial # if node.isNonTraditionalLeafNode() == False: # this is idempotent for added_node in", "20) lower_right = (40, 40) elif i % 4 == 2: upper_left =", "entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() if __name__ == \"__main__\":", "== False: curr_node = curr_node.getChildren()[0] depth = depth + 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth,", "(579, 950, 700)), \\ ((297, 196, 750), (1085, 718, 1259)), \\ ((808, 926,", "= entry.getChild() child_str = self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" +", "# n = 1,000 works in 3.428 sec. 
for pypy with m =", "RTreeNode(None, [], True) entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2, (110,", "parent, [entry], False, False) else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry) l, ll,", "chosen_d_value = chosen_d_S_pair[0] return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis, M, m): result =", "result_mbr = RawMBR(upper_left, lower_right, point) return result_mbr def getContainedItem(self): return self.contained_item def getMBRList(self):", "x1 + offset, multiplier * y1 + offset) next_x2, next_y2 = (multiplier *", "@staticmethod def toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted to turn a non-point", "new entries, # which we do do occasionally # note that M of", "node: next_root_entry = RTreeEntry(next_mbr, None) next_root = RTreeNode(None, [entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root)", "do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b) == False: return 0 else:", "def getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries() def", "in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2: curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() ==", "True) entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3, (110, 200, 100),", "split_status = None next_mbr = None if True: # if node.getNumChildren() == 0", "* math.log(10000, 2)) ** (1 / 3.0) / denominator)) # for n =", "# takes O(log(n)) time on average for start rectangle # taken from set", "(1932, 332, 1133)), \\ ((262, 221, 872), (500, 279, 1521)), \\ 
((332, 886,", "for leaf nodes # updated on 2016-11-16 to fix margin calculation # note", "heap[0] result = pair return result def toList(self): pair_list = self.heap items =", "slower for 145x growth; expected 1040x slower) # n = 20000 # 230.0411", "else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print \"split #2\" return (RTree.SPLIT, [node1, node2]) else:", "tree.insert(entry4) mbr5 = RawMBR(point5, (110, 200, 100), point5) node5 = RTreeNode(None, [], True)", "2 * (x2 - x1) + 2 * (y2 - y1) return margin", "assume \"maximal disjointedness\" # and depth-first stack for internal nodes and # best-first", "in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs =", "enlargement_values = [x[0] for x in tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values = [x", "second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True: partner_node = partner_entry.getChild()", "\"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i]) # print comp_a1, comp_a2, comp_b1, comp_b2 # do_overlap", "n = 20000 # 230.0411 seconds (~538x slower for 200x growth; expected 1528x", "ignore entry continue if node.isLeafNode() == True: # could have a safe path", "to make strong running time estimates; the reason is that # otherwise the", "are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for x in self.getNodes() if x.getEntry().getMBR().isRaw()", "= lower_right[0] margin = x2 - x1 return margin if self.getDimension() == 2:", "root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if node == None: return", "True: overlap_ratio = 1 else: overlap_ratio = 0 else: overlap_ratio = overlap_area /", "# these numbers are for upper-left's in (100, 10100) 
and # lower-right's in", "int(100 + random.randint(0, k) * 100) # z1 = int(100 + random.randint(0, k)", "split_result adjust_result = RTree.rstarAdjustTree(self, l, [e, ee], True) ended_with_split2, resulting_entries_from_split = adjust_result if", "im.save(\"tree.png\", \"PNG\") \"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry", "path to a leaf where the leaf mbr # is not contained by", "class RTree: def __init__(self): root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None,", "= min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis, M, m):", "== min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self,", "or reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x: x[0], reverse =", "close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close", "by a rectangle in conflict x-tree, # add actual rectangles to conflict x-tree,", "on 2016-08-25 to fix overlap logic for determining when to attempt an overlap-minimal", "in xrange(i + 1, self.getDimension()): comp_2a = upper_left[j] comp_2b = lower_right[j] term2 =", "toNumChildrenString(self): root = self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if node == None:", "raise Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry)", "rectangles to conflict x-tree, # use as priority (prefer_contained, 
prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() ==", "contained rectangle is contained by a rectangle in conflict x-tree, # add actual", "self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print \"split #2\" return (RTree.SPLIT,", "return result_mbr def getContainedItem(self): return self.contained_item def getMBRList(self): return [self] def clone(self): upper_left", "False: if have_resulting_second_entry_from_split == True: if (parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent)", "== self.getRootEntry().getChild() if is_root_node == True: have_node_str = True overall_str_list = None if", "tagged_enlargement_values if x[0] == min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return", "self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis,", "x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x) for x", "node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent != None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node", "questionable if this is really necessary for entry in entries: curr_node = entry.getChild()", "entry in entries: RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod def drawHelper(tree, entry, image, depth):", "= RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False: # if we made", "getUpperLeft(self): return self.upper_left def getLowerRight(self): return 
self.lower_right def getArea(self): upper_left = self.getUpperLeft() lower_right", "def __init__(self, parent, entries, is_leaf, entry = None, split_history_root_dimension = None, is_supernode =", "in xrange(component_mbr_list[0].getDimension()): components = [x[i] for x in points] min_comp_value = min(components) max_comp_value", "\" \") + \")\" return overall_str def toEntriesArePresentString(self): root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root)", "leaf_node): Q = [] self.condenseTreeHelper(leaf_node, Q) # Q is in order of low-level", "= entry.getMBR() entries = self.getEntries() tagged_mbr_list = [(x.getMBR(), x) for x in entries]", "descendant candidates on occasion, # if containment query for conflict x-tree returns entries", "RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry) was_successful, entry_collection1,", "__init__(self, mbr, child): self.mbr = mbr self.child = child def getMBR(self): return self.mbr", "== True: curr_leaf_status = \"-\" if (node.getParent() == None or (node.getParent() != None", "in x[0]], [y.getMBR() for y in x[1]]) for x in low_comp_distributions] low_mbr_pairs =", "lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1 center_y = next_y1 radius = 4 perimeter_x", "None if have_node_str == True: curr_depth = \"-\" if node.getNumEntries() != 0 else", "of actual rectangles # or leaves in r-tree; these times assume \"maximal disjointedness\"", "in entry_group1: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry", "should add to conflict x-tree result_entry_list.append(entry) raw_mbr = mbr next_mbr = raw_mbr.clone() next_node", "node.getEntries() children = node.getChildren() have_node_str = True overall_str_list = None if have_node_str ==", "if len(heap) != 0: (priority,item) = 
heapq.heappop(heap) elif len(internal_node_stack_deque) != 0: item =", "condenseTree(L) # if the root has only one child (and it is not", "node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7, (110, 200, 100), point7) node7 = RTreeNode(None,", "== True: overlap_ratio = 1 else: overlap_ratio = 0 else: overlap_ratio = overlap_area", "have_node_str == True: curr_leaf_status = \"-\" if node.isLeafNode() == False else \"+\" overall_str_list", "= RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent,", "tagged_area_values] min_area = min(area_values) candidate_tagged_area_values = [x for x in tagged_area_values if x[0]", "== True: # print \"underfull\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode()", "leaf_node, [entry], False) else: split_result = self.rstarSplitNode(leaf_node, entry) l, ll, e, ee =", "i in range(len(window_left_sizes))] window_size_pairs = [x for x in window_size_pairs if x[0] <=", "= lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2 = lower_right_b[i] side = max(0, min(comp_a2, comp_b2)", "hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node = root depth = 0 while curr_node.isLeafNode() ==", "== curr_entry: return True else: return False else: entries = curr_entry.getChild().getEntries() for next_entry", "= tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry = parent.retrieveEntryForChild(node) children = [x.getChild()", "== RTree.SUPERNODE: pass # print \"no split\" return (RTree.NO_SPLIT, [node]) def rstarInsert(self, entry):", "reference_entry): # repeatedly pop nodes, prune using enclosure/containment # w.r.t. 
reference rectangle, add", "curr_node return None \"\"\" # a little stilted since we don't need a", "def chooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node == self.getRootEntry().getChild(): return", "node8.setEntry(entry8) # problem here tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100,", "slower) # n = 5500 # 23.899 seconds (~55.96x slower for 55x growth;", "max_components = [] for i in xrange(base_mbr.getDimension()): components = [x[i] for x in", "result == False: return False return True def toNumChildrenString(self): root = self.getRootEntry().getChild() return", "= node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1)", "and to add all-start-rectangles close-ancestor finding, # which for a well-formed r-tree, takes", "self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root = RTreeNode(None, [entry1, entry2], False) self.getRootEntry().setChild(next_root)", "O(n * log(n)) time at worst; # assumes that rectangles are distinct #", "len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]],", "= (entry_group1, entry_group2, axis) return next_result def xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension() ==", "time2 - time1 print \"time difference:\", time_diff, \"seconds\" # raise Exception() for entry_to_close_ancestor_entry_list_pair", "- time1 print \"time difference:\", time_diff, \"seconds\" # raise Exception() for entry_to_close_ancestor_entry_list_pair in", "m] window_size_pairs = [(window_left_sizes[i], len(entries) - window_left_sizes[i]) for i in range(len(window_left_sizes))] window_size_pairs =", "on 
2016-11-03 to re-structure and modify adjustTree(); # stop at root instead of", "getArea(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() sides = [] for i in", "[], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea() multi_overlap_ratio =", "RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod def drawHelper(tree, entry, image, depth): node = entry.getChild()", "image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if len(entries) == 0: parent = entry.getChild().getParent() mbr =", "(40, 40) elif i % 4 == 2: upper_left = (60, 60) lower_right", "kick out close descendant candidates on occasion, # if containment query for conflict", "two strange things going on - saturation occurs # if we increase n", "upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap = True # assume that rectangles", "if entry.getChild().getParent() == None: raise Exception() \"\"\" # print tree.toString() # for entry", "print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() #", "= is_supernode def getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim def", "root_mbr_is_contained == True else 1) * root_mbr_area # min-pq priority = (first_priority_component, second_priority_component)", "# overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString(), str(node)] for entry", "speak of modifying mbr if we plan on keeping the node if node.isUnderfull()", "None if leaf_node.isFull() == False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry], False)", "= result next_result = 
(entry_group1, entry_group2, axis, False) return next_result def xtreeSupernodeInsert(self, node,", "self.condenseTree(leaf_node) # root = self.getRootEntry().getChild() \"\"\" if root.getNumChildren() == 1: # shorten tree", "2 + change_y ** 2) return distance class RTreeNode: def __init__(self, parent, entries,", "\"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE = 1 NO_SPLIT = 2", "= self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child)", "plan on keeping the node if node.isUnderfull() == False: # print \"not underfull\"", "+ offset) next_x2, next_y2 = (multiplier * x2 + offset, multiplier * y2", "for 100x growth; expected 664x slower) # n = 14500 # 170.053 seconds", "parent of entry child if curr_entry.getMBR().isRaw() == True: if entry == curr_entry: return", "tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print tree.toString() tree2 = RTree() import random entries", "return False def isComposite(self): return False def getUpperLeft(self): return self.upper_left def getLowerRight(self): return", "as new root its only child pass def condenseTree(self, leaf_node): Q = []", "self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self, node): if node == None: return \"\" entries", "self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) # returns entries def doEnclosureQuery(self, mbr): partial_result = []", "lambda x: x[0], reverse = True) for priority_tagged_internal_entry in priority_tagged_internal_entries: priority, internal_entry =", "print tree.toString() # for entry in entries[0 : 4]: # print \"supernodes:\", [x", "+ result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for y in", "for priority_tagged_internal_entry in 
priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item) #", "# if node.isNonTraditionalLeafNode() == True: if node.isLeafNode() == True and node == self.getRootEntry().getChild():", "def toLeafStatusStringHelper(self, node): if node == None: return \"\" entries = node.getEntries() children", "mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry", "added_node.setParent(node) if split_status == RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result =", "mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node = RTreeNode(None, [], True) next_entry =", "# if containment query for conflict x-tree returns entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for", "\"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i]) # b \"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i]) #", "modifying mbr if we plan on keeping the node if node.isUnderfull() == False:", "y2 - y1 distance = math.sqrt(change_x ** 2 + change_y ** 2) return", "= contained_item def isRaw(self): return True @staticmethod def makeMBRFromPoint(point): upper_left = point lower_right", "depth): for entry in entries: RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod def drawHelper(tree, entry,", "if entry == curr_entry: return True else: return False else: entries = curr_entry.getChild().getEntries()", "(1300, 1330, 1055))] \"\"\" # n = 10,000 works in 1 min. 
54", "for curr_entry in entries: curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension = split_history_root_dimension", "a non-point mbr to a point\") return mbr.getUpperLeft() def getVec(self): return self.vec def", "of actual rectangles # for an r-tree and O(n * log(n)) time at", "10000) # upper_left = (x1, y1, z1) # lower_right = (x2, y2, z2)", "<= left_value2 and right_value1 >= right_value2 if component_does_enclose == False: does_enclose = False", "return self.upper_left def getLowerRight(self): return self.lower_right def getIDValue(self): return self.id_value class Point: def", "100), None) print tree.doContainmentQuery(curr_mbr3) # raise Exception() print tree.doOverlapQuery(curr_mbr2) # raise Exception() print", "= curr_entry # internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component) priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry)", "= CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1: next_curr_node = curr_entry.getChild() if", "we made it this far, we should add to conflict x-tree result_entry_list.append(entry) raw_mbr", "for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() if __name__ == \"__main__\": main()", "False # returns entries def doContainmentQuery(self, mbr): partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result)", "point lower_right = point result_mbr = RawMBR(upper_left, lower_right, point) return result_mbr def getContainedItem(self):", "left_value2 = mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose = left_value1 <=", "ignore_entry: # ignore node if its entry matches the ignore entry continue if", 
"False break return does_enclose def isEqualTo(self, mbr): upper_left1 = self.getUpperLeft() lower_right1 = self.getLowerRight()", "= time2 - time1 print \"time difference:\", time_diff, \"seconds\" # raise Exception() for", "getDistance(point1, point2): x1, y1 = point1 x2, y2 = point2 change_x = x2", "self.split_history_root_dimension = split_history_root_dimension self.is_supernode = is_supernode self.entry = entry def getEntry(self): return self.entry", "= -1 * curr_mbr_area item = curr_entry pair = (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw()", "drawHelper(tree, entry, image, depth): node = entry.getChild() entries = node.getEntries() mbr_list = [entry.getMBR()]", "mbr_group2 = [x.getMBR() for x in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2)", "getMBRList(self): return [self] def clone(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() contained_item =", "result = upper_left_matches == True and lower_right_matches == True return result class CompositeMBR(MBR):", "entries = self.getEntries() tagged_mbr_list = [(x.getMBR(), x) for x in entries] tagged_overlapped_mbr_list =", "\"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self, entries,", "= [x[1] for x in pair_list] return items def getSize(self): return len(self.heap) import", "self.getRootEntry(), partial_result) return partial_result def doEnclosureQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True:", "= node.getParent() curr_entries = node.getEntries() entry = None \"\"\" if node.getParent() == None:", "implicit (determined using points sampled) and assumed to be consistent # we never", "times assume \"maximal disjointedness\" # and depth-first stack for internal nodes and #", "True: if node == self.getRootEntry().getChild(): return node else: return 
node.getParent() else: entries =", "subtrees unti lthe leaf L that contains E is found # remove E", "/ 2 next_x = multiplier * x next_y = multiplier * y image.strokeColor(\"none\")", "self.getLowerRight() if self.getDimension() == 0: raise Exception() if self.getDimension() == 1: x1 =", "10), (9, 10, 10), (6, 10, 10), (9, 10, 10), (3, 10, 10),", "\\ ((433, 499, 483), (1300, 1330, 1055))] \"\"\" # n = 10,000 works", "(~1x slower for 1x growth; expected 1x slower) # n = 1000 #", "overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toLeafStatusString(self):", "for a leaf, # which is not the case when we initially insert", "# n = 6,000 works in 56.672 sec. for pypy with m =", "= PythonMagick.Color(65535, 0, 0, 32767) elif color_choice == 1: color = PythonMagick.Color(0, 0,", "== RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry) was_successful,", "low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values)", "node never has a raw mbr # leaf is a non-traditional leaf leaf_node", "self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal = upper_left1", "= RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 =", "mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for x in overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions =", "for this, as internal nodes can temporarily look like leaf nodes # keep_nodes", "1): raise Exception() window_left_sizes = [m - 1 + k for k in", "entry) entry_collection1, entry_collection2, dimension = result1 mbr_collection1 = [x.getMBR() for x in 
entry_collection1]", "# two strange things going on - saturation occurs # if we increase", "mbr.getUpperLeft() lower_right_matches = self.getLowerRight() == mbr.getLowerRight() result = upper_left_matches == True and lower_right_matches", "\"\"\" # for entry in entries[0 : 4]: # for entry in entries[0", "we plan on keeping the node if node.isUnderfull() == False: # print \"not", "root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea()", "added_nodes]) # print \"supernode #1\" return (RTree.SUPERNODE, [node]) elif split_status == RTree.SUPERNODE: pass", "m): result = RTree.rstarGenDistributions(entries, M, m) candidate_distributions = None candidate_distributions = result[axis][0] +", "import math for i in xrange(n): upper_left = None lower_right = None \"\"\"", "search all entries of RN to find E.mbr # else: # RN is", "contained by a rectangle in conflict x-tree, # add actual rectangles to conflict", "leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root = self.getRootEntry().getChild() \"\"\" if root.getNumChildren() == 1: # shorten", "def getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea()", "+ \")\" return overall_str def toLeafStatusString(self): root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self,", "# if RN is a leaf node # search all entries of RN", "slower) # n = 1000 # 1.1649 seconds (~2.72x slower for 10x growth;", "node if its entry matches the ignore entry continue if node.isLeafNode() == True:", "result == None: continue else: return curr_node return None \"\"\" # a little", "/ denominator)) # for n = 5500 # k = int(round((10000 * math.log(10000,", ">= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries() < 
self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node]", "result_list.append(curr_tuple) return result_list @staticmethod def rstarChooseSplitAxis(entries, M, m): result = RTree.rstarGenDistributions(entries, M, m)", "(RTree.SUPERNODE, [node]) elif split_status == RTree.SUPERNODE: pass # print \"no split\" return (RTree.NO_SPLIT,", "= internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap) entry = item node = entry.getChild() mbr", "if x[0] == min_overlap_value] next_next_candidates = [x[1] for x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions)", "x1, y1 = upper_left x2, y2 = lower_right multiplier = 1 / (1.0", "= RTreeNode(None, [], True) entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4,", "for entry in entries[0 : 15]: for entry in entries: tree2.insert(entry) \"\"\" if", "mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(), x[1]) for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values =", "None, dimension) else: return (True, entry_collection3, entry_collection4, dimension) else: return (True, entry_collection1, entry_collection2,", "self.id_value import string class RTree: def __init__(self): root_node = RTreeNode(None, [], True) root_mbr", "node = entry.getChild() entries = node.getEntries() mbr_list = [entry.getMBR()] for mbr in mbr_list:", "in range(1, M - 2 * m + 2 + 1)] window_left_sizes =", "CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in", "[((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) 
for", "= 20000 # 230.0411 seconds (~538x slower for 200x growth; expected 1528x slower)", "setToSuperNode(self, is_supernode): self.is_supernode = is_supernode def getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension", "have_resulting_second_entry_from_split == True: if (parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree,", "entry.getMBR() location = Point.toPoint(mbr) x, y = location multiplier = 1 / (1.0", "result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]),", "def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b) == False: return 0 else: upper_left_a =", "pair_list = self.heap items = [x[1] for x in pair_list] return items def", "image, depth): for entry in entries: RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod def drawHelper(tree,", "def rstarChooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node == self.getRootEntry().getChild(): return", "y1 + offset) next_x2, next_y2 = (multiplier * x2 + offset, multiplier *", "= ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result =", "this line presumes that we have parent set correctly for a leaf, #", "else: upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight()", "print comp_a1, comp_a2, comp_b1, comp_b2 # do_overlap = True if without_borders == True:", "[x for x in overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value] next_next_candidates = [x[1] for", "# we never split a 
super-node # updated on 2016-08-23 to fix traditional/non-traditional", "# added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry) was_successful, entry_collection1, entry_collection2,", "def doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders): if entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(), mbr,", "result; # if we made it this far, we should add to conflict", "below M # updated on 2016-11-06 to add single-start-rectangle-based # close-descendant finding that", "list(set(Q)) Q.reverse() for curr_node in Q: curr_entry = curr_node.getEntry() # print \"mbr:\", curr_entry.getMBR().toString()", "4]: # print \"supernodes:\", [x for x in tree.getNodes() if x.isSuperNode() == True],", "= [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split)", "+ 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll,", "it this far, we should add children to priority queue entries = node.getEntries()", "RTree() import random entries = [] # lower_rights = [(3, 10, 10), (1,", "m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M,", "for determining when to attempt an overlap-minimal split # updated on 2016-11-03 to", "random entries = [] # lower_rights = [(3, 10, 10), (1, 10, 10),", "None, None, None) dimension = None result1 = self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension", "[x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR() for x in 
entry_collection2] #", "reduce(lambda x, y: x * y, sides) return intersection_volume def getMarginValue(self): upper_left =", "320)), \\ ((945, 260, 1091), (1932, 332, 1133)), \\ ((262, 221, 872), (500,", "= upper_left_matches == True and lower_right_matches == True return result class CompositeMBR(MBR): def", "entry.getChild().getEntries() children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for x", "x in entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1)", "lower_right1 == lower_right2 return is_equal class RawMBR(MBR): def __init__(self, upper_left, lower_right, contained_item): MBR.__init__(self,", "to a leaf where the leaf mbr # is not contained by reference", "of non-existent parent of root; # also, we implement delete(); note that our", "result = pair return result def toList(self): pair_list = self.heap items = [x[1]", "explicitly create new entries, # which we do do occasionally # note that", "= [curr_leaf_status] else: overall_str_list = [] for entry in entries: child = entry.getChild()", "well-formed r-tree, this takes O(n * log(n)) time, # where n is number", "whether to include associated entry in result; # if we made it this", "(None, None, None, True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall =", "split_result = self.rstarSplitNode(leaf_node, entry) l, ll, e, ee = split_result adjust_result = RTree.rstarAdjustTree(self,", "self.child = child def getMBR(self): return self.mbr def setMBR(self, mbr): self.mbr = mbr", "# this line presumes that we have parent set correctly for a leaf,", "== False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry", "str(self.getEntries()) class RTreeEntry: def __init__(self, mbr, child): self.mbr = mbr self.child = child", "tree.adjustTree(tree, parent, [entry], False, False) else: 
parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry) l,", "x in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node, entry): curr_node =", "= [] for curr_entry in entries: base_mbr = curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr)", "return (RTree.NO_SPLIT, [node]) def rstarInsert(self, entry): leaf_node = self.rstarChooseLeaf(entry) adjust_result = None if", "image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if len(entries) == 0: parent = entry.getChild().getParent()", "# only makes sense to speak of modifying mbr if we plan on", "component_does_enclose == False: does_enclose = False break return does_enclose def isEqualTo(self, mbr): upper_left1", "entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in", "on average # for start rectangle taken from set of actual rectangles #", "= self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \")", "entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return (node1, node2, entry1, entry2) @staticmethod", "elif node.isLeafNode() == False: # if we made it this far, we should", "have_node_str == True: curr_leaf_status = str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else: overall_str_list = []", "x in tagged_enlargement_values if x[0] == min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in", "chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return chosen_entry def xtreeInsert(self, entry): # print", "upper_left self.lower_right = lower_right def isRaw(self): return 
False def isComposite(self): return False def", "print tree.toString() curr_root = tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110, 200, 100), point1) node1", "True overall_str_list = None if have_node_str == True: curr_depth = \"-\" if node.getNumEntries()", "== ignore_entry: # ignore node if its entry matches the ignore entry continue", "32767) elif color_choice == 1: color = PythonMagick.Color(0, 0, 65535, 32767) elif color_choice", "100, 0), (100, 100, 0)) curr_mbr2 = RawMBR((50, 100, 0), (50, 100, 0),", "self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry, curr_entry): \"\"\" if node.isLeafNode() == False: curr_mbr =", "mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea() area2 = mbr2.getArea()", "getLowerRight(self): return self.lower_right def getIDValue(self): return self.id_value class Point: def __init__(self, vec, id_value):", "entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) # returns entries", "math.sqrt(change_x ** 2 + change_y ** 2) return distance class RTreeNode: def __init__(self,", "comp_b2 and comp_a2 > comp_b1 else: do_overlap = do_overlap and comp_a1 <= comp_b2", "(priority,item) heapq.heappush(heap,pair) # print entry_pq # raise Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr,", "comp_a2 > comp_b1 else: do_overlap = do_overlap and comp_a1 <= comp_b2 and comp_a2", "pair = (priority,item) # if curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr) == True: #", "892, 131), (1543, 1838, 669)), \\ ((879, 319, 789), (1877, 744, 791)), \\", "= [] # entry_pq.push(root_entry, priority) item = root_entry pair = (priority,item) heapq.heappush(heap,pair) #", "[x for x in window_size_pairs if x[0] <= M and x[0] >= m", "child_str = self.toNumChildrenStringHelper(child) curr_str = child_str 
overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \"", "parent = node.getParent() curr_entries = node.getEntries() entry = None \"\"\" if node.getParent() ==", "color_choice == 1: color = PythonMagick.Color(0, 0, 65535, 32767) elif color_choice == 2:", "mbr_list = [x.getMBR() for x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x,", "upper_left = (20, 20) lower_right = (40, 40) elif i % 4 ==", "leaf_node): node = leaf_node parent = node.getParent() if parent != None: curr_entries =", "= True overall_str_list = None if is_root_node == False: overall_str_list = [node.getEntry().getMBR().toString()] #", "x[1]) for x in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for x in overlap_value_tagged_candidate_distributions] min_overlap_value", "chosen_entry = entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if RN is a", "next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]),", "actual rectangles or leaves; # assumes that rectangles are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes", "seconds (~398x slower for 145x growth; expected 1040x slower) # n = 20000", "* y, sides) return area @staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr, mbr]", "= self.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in curr_entries] mbr_list", "== 0: color = PythonMagick.Color(65535, 0, 0, 32767) elif color_choice == 1: color", "* 100) # x2 = int(x1 + random.random() * 100) # y2 =", "chosen_d_S_pair[0] return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis, M, m): result = RTree.rstarGenDistributions(entries, M,", "need a O(log(n)) time operation # to find the entry containing node; just", "x in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() + 
x[1].getMarginValue() for x in upper_mbr_pairs] upper_margin_value_sum", "** (1 / 3.0) / denominator)) # for n = 1000 # k", "== False: if have_resulting_second_entry_from_split == True: if (parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry)", "1305, 1149)), \\ ((800, 709, 871), (1390, 1402, 1548)), \\ ((433, 499, 483),", "split_result return tree.adjustTree(tree, l, [e, ee], True, False) else: return (False, []) \"\"\"", "[(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x", "i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ : ] low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i])", "priority queue entries = node.getEntries() priority_tagged_internal_entries = [] for curr_entry in entries: #", "[x.getChild().getNumEntries() == 0 for x in self.getEntries()]) is_leaf_node = self.getNumChildren() == 0 return", "node) node.setEntry(entry) entries.append(entry) \"\"\" for i in xrange(10): upper_left = (20, 20) lower_right", "expected 1528x slower) # n = 2000 # n = 1000 # n", "self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if node == None: return \"\" entries", "raw_mbr = mbr next_mbr = raw_mbr.clone() next_node = RTreeNode(None, [], True) next_entry =", "for x in entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2] mbr1 =", "= (30, 100, 0) point2 = (40, 100, 0) point3 = (50, 100,", "child): self.mbr = mbr self.child = child def getMBR(self): return self.mbr def setMBR(self,", "Q: curr_entry = curr_node.getEntry() # print \"mbr:\", curr_entry.getMBR().toString() # print \"tree:\", self.toString() self.insert(curr_entry)", "associated mbr does not enclose reference mbr # and associated mbr is not", "not in [x.getChild().getNumEntries() == 0 for x in 
self.getEntries()]) return is_non_traditional_leaf_node \"\"\" def", "def __init__(self, mbr, child): self.mbr = mbr self.child = child def getMBR(self): return", "# priority = (first_priority_component, second_priority_component) if curr_mbr.isRaw() == True: priority = -1 *", "original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node != self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else:", "RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis,", "lower_right[j] term2 = comp_2b - comp_2a term = 2 * term1 * term2", "RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2, (110, 200, 100), point2) node2 =", "node.getEntries() children = node.getChildren() have_node_str = True is_root_node = node == self.getRootEntry().getChild() if", "tree.delete(entry8) \"\"\" print tree.toString() tree2 = RTree() import random entries = [] #", "r-tree; # takes O(n * log(n)) time at worst; # assumes that rectangles", "self.mbr = mbr def getChild(self): return self.child def setChild(self, node): self.child = node", "None: entry = tree.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in", "= CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no split\" return (RTree.NO_SPLIT, [node]) if node.isLeafNode() ==", "[] for i in xrange(self.getDimension()): comp1 = upper_left[i] comp2 = lower_right[i] side =", "curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry in entry_group2: next_curr_node =", "MBR.__init__(self, upper_left, lower_right) self.mbr_list = mbr_list def getMBRList(self): return self.mbr_list def isComposite(self): return", "mbr self.child = child def getMBR(self): return self.mbr def setMBR(self, 
mbr): self.mbr =", "= self.rstarChooseLeaf(entry) adjust_result = None if leaf_node.isFull() == False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result =", "delete(self, E, RN): def findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry, curr_entry):", "return items def getSize(self): return len(self.heap) import math def getDistance(point1, point2): x1, y1", "@staticmethod def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b) == False: return 0 else: upper_left_a", "entry in entries[0 : 4]: # for entry in entries[0 : 15]: for", "= [x.getEntry() for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry in start_rectangle_entries:", "x[1]) for x in tagged_mbr_list] area_values = [x[0] for x in tagged_area_values] min_area", "curr_node = curr_entry.getChild() result = self.findLeafHelper(entry, curr_node) if result == None: continue else:", "for x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(), x[1]) for x in", "has to grow with n # n = 100 # 0.427 seconds (~1x", "self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node, Q): # demote super-node if necessary if node.isSuperNode()", "return is_traditional_leaf_node \"\"\" def isLeafNode(self): # is_leaf_node = (self.getParent() == None and self.getNumChildren()", "PythonMagick.Color(0, 0, 65535, 32767) elif color_choice == 2: color = PythonMagick.Color(0, 65535, 0,", "= lower_right[j] term2 = comp_2b - comp_2a term = 2 * term1 *", "# print mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries()", "split\" return (RTree.NO_SPLIT, [node]) def rstarInsert(self, entry): leaf_node = self.rstarChooseLeaf(entry) adjust_result = None", "node, which can be None if no 
match is found # finds one", "entry.getChild() mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr) == False: #", "else: return (True, entry_collection3, entry_collection4, dimension) else: return (True, entry_collection1, entry_collection2, dimension) def", "internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\", conflict_x_tree.toString() # for a well-formed r-tree, this takes", "[]) \"\"\" # assume item is in tree # returns a node, which", "RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry def setRootEntry(self, root_entry): self.root_entry =", "of two works import sys # import PythonMagick import heapq from collections import", "it this far, we should add to conflict x-tree result_entry_list.append(entry) raw_mbr = mbr", "curr_entry in entry_collection2: curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() == node: next_root_entry = RTreeEntry(next_mbr,", "class PriorityQueue: def __init__(self): self.heap = [] def push(self, item, priority): pair =", "= mbr.getLowerRight()[i] component_does_enclose = left_value1 <= left_value2 and right_value1 >= right_value2 if component_does_enclose", "entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2, (110, 200, 100), point2)", "Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4, dimension, do_fail = result2 # raise", "close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() # raise Exception() # for entry", "an overlap-minimal split # updated on 2016-11-03 to re-structure and modify adjustTree(); #", "rectangle # decide whether to include associated entry in result; # if we", "# updated on 2016-08-23 to 
fix traditional/non-traditional isLeafNode() distinction # updated on 2016-08-25", "the directed graph implied by the r-tree # is not acyclic and we", "!= 0: # entry = entry_pq.pop() item = None if len(heap) != 0:", "== 0: root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None) root_entry", "tagged_overlapped_mbr curr_node = curr_entry.getChild() result = self.findLeafHelper(entry, curr_node) if result == None: continue", "comp_a1 < comp_b2 and comp_a2 > comp_b1 else: do_overlap = do_overlap and comp_a1", "\"split #2\" return (RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() for x in added_nodes])", "entries = node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries =", "log(n)) time at worst; # and to add all-start-rectangles close-ancestor finding, # which", "curr_entries = node.getEntries() entry = None \"\"\" if node.getParent() == None: entry =", "wish to insert using order of high-level to low-level # Q = list(set(Q))", "x-tree continue if entry == ignore_entry: # ignore node if its entry matches", "(-1 if root_mbr_is_contained == True else 1) * root_mbr_area # min-pq priority =", "entries of RN that cover E.mbr # follow the corresponding subtrees unti lthe", "return self.entry def setEntry(self, entry): self.entry = entry def isSuperNode(self): return self.is_supernode def", "in entries[0 : 15]: for entry in entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent() ==", "curr_entry # pair = (priority,item) # if curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr) ==", "fixed bug with parent pointers for xtreeInsert(); # have supernode demotion when size", "\"decision point\" \"\"\" if node.isSuperNode() == True: # print \"supernode encountered\" parent =", "x2 * multiplier + offset next_y2 = y2 * multiplier + offset \"\"\"", "= entry.getMBR() if 
mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr) == False: # ignore node", "lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension = mbr_a.getDimension() sides", "not contained within reference mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore node", "for x in self.getNodes() if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries = [x.getEntry() for x", "1548)), \\ ((433, 499, 483), (1300, 1330, 1055))] \"\"\" # n = 10,000", "else 1) * root_mbr_area # min-pq priority = (first_priority_component, second_priority_component) # priority =", "if node.isLeafNode() == False: if have_resulting_second_entry_from_split == True: if (parent.getNumChildren() + 1) <=", "string class RTree: def __init__(self): root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None,", "10, 10), (3, 10, 10), (1, 10, 10), (3, 10, 10)] # for", "x in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for x in combined_area_tagged_next_candidate_distributions", "point4) node4 = RTreeNode(None, [], True) entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5", "entry): # print \"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE = 1", "== True], tree.getRootEntry().getChild() # tree2.draw() print len(tree2.getNodes()) import time time1 = time.time() result", "self.child = node @staticmethod def draw(tree, entries, image, depth): for entry in entries:", "featuring enclosure and containment queries # dimension is implicit (determined using points sampled)", "\"\"\" im = Image.new(\"RGB\", (768, 768), \"white\") draw = ImageDraw.Draw(im) root = self.getRoot()", "RTree() internal_node_stack_deque = deque() # while len(heap) != 0: while len(internal_node_stack_deque) != 
0", "10, 10), (1, 10, 10), (8, 10, 10), (6, 10, 10), (9, 10,", "if node.isLeafNode() == False: curr_mbr = entry.getMBR() entries = self.getEntries() tagged_mbr_list = [(x.getMBR(),", "self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries", "set of actual rectangles # for an r-tree and O(n * log(n)) time", "is contained by a rectangle in conflict x-tree, # add actual rectangles to", "True: have_node_str = True overall_str_list = None if is_root_node == False: overall_str_list =", "# 84.222 seconds (~197x slower for 100x growth; expected 664x slower) # n", "8 and M = 16 # n = 6,000 works in 56.672 sec.", "curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no split\" return (RTree.NO_SPLIT, [node]) if", "curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list = [x.getMBR() for x in entries] mbr =", "internal_entry = priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\", conflict_x_tree.toString() #", "self.is_supernode = is_supernode self.entry = entry def getEntry(self): return self.entry def setEntry(self, entry):", "def doesEnclose(self, mbr): dimension = self.getDimension() does_enclose = True for i in xrange(dimension):", "= 10000 # k = int(round((20000 * math.log(20000, 2)) ** (1 / 3.0)", "y: x * y, sides) return intersection_volume def getMarginValue(self): upper_left = self.getUpperLeft() lower_right", "candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry", "= Point.toPoint(mbr) x, y = location multiplier = 1 / (1.0 * 6.5)", "m): result = RTree.rstarGenDistributions(entries, M, m) S_comp_dict = {} for i in xrange(entries[0].getMBR().getDimension()):", "(50, 100, 0), point3) curr_mbr2b 
= RawMBR((50, 50, 0), (100, 100, 0), HyperRectangle((50,", "[y.getMBR() for y in x[1]]), x) for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]),", "and (len(entries) - x) <= M and (len(entries) - x) >= m] window_size_pairs", "= root.getEntries() chosen_entry = entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if RN", "node1 = RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 =", "for upper-left's in (100, 10100) and # lower-right's in (ul_i, ul_i + 10000)", "in tagged_area_values if x[0] == min_area] candidate_entries = [x[1] for x in candidate_tagged_area_values]", "= RawMBR(point8, (110, 200, 100), point8) node8 = RTreeNode(None, [], True) entry8 =", "\"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toDepthString(self): root =", "if entry == ignore_entry: # ignore node if its entry matches the ignore", "partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if (parent.getNumChildren() +", "True: parent.removeEntry(entry) if (parent.getNumChildren() + 2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return", "these times assume \"maximal disjointedness\" # and depth-first stack for internal nodes and", "* curr_mbr_area item = curr_entry pair = (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False:", "+ area2 - overlap_area ovelap_ratio = None if union_area == 0: if mbr1.isEqualTo(mbr2)", "Image.new(\"RGB\", (512, 512), \"white\") \"\"\" im = Image.new(\"RGB\", (768, 768), \"white\") draw =", "min. 54 sec. 
for pypy with m = 2 and M = 4", "# for n = 14500 # x1 = int(100 + random.randint(0, k) *", "for n = 14500 # x1 = int(100 + random.randint(0, k) * 100)", "entry.getChild() child_str = self.toNumChildrenStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list,", "with m = 8 and M = 16 # these numbers are for", "@staticmethod def drawHelper(tree, entry, image, depth): node = entry.getChild() entries = node.getEntries() mbr_list", "RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result", "mbr4 = RawMBR(point4, (110, 200, 100), point4) node4 = RTreeNode(None, [], True) entry4", "/ (1.0 * union_area) # raise Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise", "node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis =", "None: return \"\" entries = node.getEntries() children = node.getChildren() have_node_str = True overall_str_list", "entry, image, depth): node = entry.getChild() entries = node.getEntries() mbr_list = [entry.getMBR()] for", "x >= m and (len(entries) - x) <= M and (len(entries) - x)", "node.getParent() if parent != None: curr_entries = node.getEntries() entry = node.getParent().retrieveEntryForChild(node) children =", "(x2 - x1) + 2 * (y2 - y1) return margin surface_area =", "and associated mbr is not contained within reference mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) ==", "in entries: child = entry.getChild() child_str = self.toDepthStringHelper(child, depth + 1) curr_str =", "adjust_result if ended_with_split2 == True: e, ee = resulting_entries_from_split l = e.getChild() ll", "# does intersection query def doOverlapQuery(self, mbr, without_borders = False): partial_result = []", "in pair_list] return items def getSize(self): return 
len(self.heap) import math def getDistance(point1, point2):", "mbr): upper_left1 = self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2 = mbr.getLowerRight()", "next_mbr = None if True: # if node.getNumChildren() == 0 and node ==", "entries: if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def doEnclosureQueryWithEarlyStopping(self, mbr): result =", "entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5, (110, 200, 100), point5)", "# for entry in entries[0 : 4]: # print \"supernodes:\", [x for x", "in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if (parent.getNumChildren()", "need PythonMagick # note that nodes always point to same entries # unless", "for internal nodes and # best-first priority queue for leaf nodes # updated", "self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] keep_nodes = [x for x in self.getNodesForNode(node) if", "# ignore node if its entry matches the ignore entry continue if node.isLeafNode()", "upper_left = point lower_right = point result_mbr = RawMBR(upper_left, lower_right, point) return result_mbr", "((920, 974, 724), (1802, 1524, 1378)), \\ ((911, 953, 196), (1776, 1662, 455)),", "print \"no split\" return (RTree.NO_SPLIT, [node]) def rstarInsert(self, entry): leaf_node = self.rstarChooseLeaf(entry) adjust_result", "= self.heap items = [x[1] for x in pair_list] return items def getSize(self):", "def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node): if node.isLeafNode() ==", "and M = 4 # n = 1,000 works in 3.428 sec. 
for", "in entries: child = entry.getChild() child_str = self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str", "for 1x growth; expected 1x slower) # n = 1000 # 1.1649 seconds", "{} for curr_entry in entries: curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension =", "mbr, child): self.mbr = mbr self.child = child def getMBR(self): return self.mbr def", "for i in xrange(10): upper_left = (20, 20) lower_right = (40, 40) mbr", "partial_result) return partial_result def doEnclosureQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: if", "print \"time difference:\", time_diff, \"seconds\" # raise Exception() for entry_to_close_ancestor_entry_list_pair in result.items(): entry,", "entry_pq = PriorityQueue() heap = [] # entry_pq.push(root_entry, priority) item = root_entry pair", "718, 1259)), \\ ((808, 926, 151), (889, 1755, 320)), \\ ((945, 260, 1091),", "taken from set of actual rectangles for an r-tree; # takes O(n *", "pass \"\"\" # takes O(log(n)) time on average for start rectangle # taken", "return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent = node.getParent() curr_entries = node.getEntries() entry = None", "raise Exception() node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in node.getEntries()] curr_x_tree", "332, 1133)), \\ ((262, 221, 872), (500, 279, 1521)), \\ ((332, 886, 493),", "== 0 and node == self.getRootEntry().getChild(): # if node.getNumChildren() == 0: # if", "self.getNumEntries() == 0 return is_traditional_leaf_node \"\"\" def isLeafNode(self): # is_leaf_node = (self.getParent() ==", "upper_left = (100, 100) lower_right = (120, 120) \"\"\" denominator = (100 *", "if have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if (parent.getNumChildren() + 2) <= 
parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry)", "self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4, dimension, do_fail = result2 # raise Exception() if do_fail", "dimension = result1 mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR()", "= None if is_root_node == False: overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(),", "100, 0), 1)) tree = RTree() print tree.toString() curr_root = tree.getRootEntry().getChild() mbr1 =", "node.getEntries() candidate_entries = None # if node.isLeafNode() == True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry)", "== False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry], False) else: split_result =", "= list(set(Q)) Q.reverse() for curr_node in Q: curr_entry = curr_node.getEntry() # print \"mbr:\",", "= self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1) if result == False: return False return", "and reference_mbr.doesEnclose(mbr) == False: # ignore node if associated mbr does not enclose", "= 8 and M = 16 # n = 6,000 works in 56.672", "\"close ancestor:\", close_ancestor_entry.getMBR().toString() # raise Exception() # for entry in entries[0 : 15]:", "x1 = int(100 + random.randint(0, k) * 100) # y1 = int(100 +", "keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] keep_nodes =", "<= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e, ee", "= candidate_entries[0] chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry): return 
self.xtreeInsert(entry)", "= RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 = RawMBR(point8, (110, 200, 100), point8) node8", "import time time1 = time.time() result = tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff =", "for entry in entries[0 : 15]: for entry in entries: # if len(tree.getNodes())", "RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7, (110, 200, 100), point7) node7 =", "perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if len(entries) == 0:", "curr_mbr.getLowerRight(), None) next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry)", "def getLowerRight(self): return self.lower_right def getArea(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() sides", "result_list @staticmethod def rstarChooseSplitAxis(entries, M, m): result = RTree.rstarGenDistributions(entries, M, m) S_comp_dict =", "do_overlap = True # assume that rectangles never have negative area for i", "return chosen_entry def xtreeInsert(self, entry): # print \"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT =", "m + 2 + 1)] window_left_sizes = [x for x in window_left_sizes if", "window_size_pairs = [x for x in window_size_pairs if x[0] <= M and x[0]", "x2, y2 = lower_right multiplier = 1 / (1.0 * 6.5) * 0.8", "\"\"\" # n = 10,000 works in 1 min. 54 sec. 
for pypy", "start rectangle taken from set of actual rectangles # for an r-tree and", "/ 2 offset = (1536 * 0.2) / 2 x1 = 0 y1", "else: entry = self.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in", "node.getEntries() mbr_list = [entry.getMBR()] for mbr in mbr_list: upper_left = mbr.getUpperLeft() lower_right =", "== node: next_root_entry = RTreeEntry(next_mbr, None) next_root = RTreeNode(None, [entry1, entry2], None, next_root_entry)", "entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis, False) return next_result def", "child if curr_entry.getMBR().isRaw() == True: if entry == curr_entry: return True else: return", "return tree.adjustTree(tree, parent, [entry], False, False) else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry)", "else: entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: result", "returns entries def doContainmentQuery(self, mbr): partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result", "upper_left[0] x2 = lower_right[0] margin = x2 - x1 return margin if self.getDimension()", "xtreeInsert(self, entry): # print \"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE =", "entry, curr_entry): \"\"\" if node.isLeafNode() == False: curr_mbr = entry.getMBR() entries = self.getEntries()", "mbr = CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return mbr @staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area =", "ee = resulting_entries_from_split l = e.getChild() ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1)", "= (first_priority_component, second_priority_component) # priority = -1 * root_mbr_area # entry_pq = PriorityQueue()", "= conflict_x_tree.doContainmentQuery(mbr) for matching_entry in 
matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry) # if node", "10, 10), (9, 10, 10), (3, 10, 10), (1, 10, 10), (3, 10,", "chooseSubtree(self, entry, node): entries = node.getEntries() candidate_entries = None # if node.isLeafNode() ==", "return next_result def xtreeSupernodeInsert(self, node, entries): if node.isSuperNode() == False: node.setToSuperNode(True) # questionable", "min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x for x in overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value] next_next_candidates", "= [] for i in xrange(component_mbr_list[0].getDimension()): components = [x[i] for x in points]", "self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self, entries, image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier", "in xrange(mbr_a.getDimension()): # a \"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i]) # a \"right\" comp_a2", "tree.adjustTree(tree, l, [e, ee], True, False) else: return (False, []) \"\"\" # assume", "return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if node == None: return \"\" entries =", "comp_b2 # do_overlap = True if without_borders == True: do_overlap = do_overlap and", "node.getEntries() entry = None if node.getParent() == None: entry = tree.getRootEntry() else: entry", "entry_collection1, None, entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent, entry_collection2, None,", "return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if node == None:", "m) S_comp_dict = {} for i in xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions = result[i] S_comp_value", "without_borders == True: do_overlap = do_overlap and comp_a1 < comp_b2 and comp_a2 >", 
"120) \"\"\" denominator = (100 * math.log(100, 2)) ** (1 / 3.0) k", "root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image =", "return overall_str def toString(self): root = self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self, node): if", "200, 100), point4) node4 = RTreeNode(None, [], True) entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4)", "partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders) return partial_result def doOverlapQueryHelper(self, mbr, entry,", "# entry = entry_pq.pop() item = None if len(heap) != 0: (priority,item) =", "MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(), x[1]) for x in tagged_mbr_list] area_values", "conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore node if enclosing mbr exists in conflict x-tree", "for 200x growth; expected 1528x slower) # n = 2000 # n =", "in range(len(entries)): curr_entry = entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list =", "mbr # is not contained by reference rectangle; # check explicitly for this", ">= m] window_size_pairs = [(window_left_sizes[i], len(entries) - window_left_sizes[i]) for i in range(len(window_left_sizes))] window_size_pairs", "mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr) == False: # ignore node if associated mbr", "mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1, y1 = upper_left x2, y2 = lower_right multiplier", "1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e,", "in xrange(1000): upper_left = (0, 0) lower_right = (10, 10) mbr = 
RawMBR(upper_left,", "in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in low_mbr_pairs] low_margin_value_sum =", "False) else: split_result = tree.rstarSplitNode(parent, partner_entry) l, ll, e, ee = split_result return", "lower_right_a[i]) # b \"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i]) # b \"right\" comp_b2 =", "dimension is implicit (determined using points sampled) and assumed to be consistent #", "node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry)", "i in xrange(n): upper_left = None lower_right = None \"\"\" if i %", "w.r.t. reference rectangle, add children to priority queue, # ignore if contained rectangle", "# print \"conflict x-tree:\", conflict_x_tree.toString() # for a well-formed r-tree, this takes O(n", "actual rectangles # for an r-tree and O(n * log(n)) time at worst;", "root depth = 0 while curr_node.isLeafNode() == False: curr_node = curr_node.getChildren()[0] depth =", "i in xrange(10): # for i in xrange(4): \"\"\" ul_lr_pairs = [((797, 989,", "for added_node in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status == RTree.SPLIT: # added_node.setParent(node) if", "xrange(i + 1, self.getDimension()): comp_2a = upper_left[j] comp_2b = lower_right[j] term2 = comp_2b", "return is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() == 0 return is_traditional_leaf_node \"\"\"", "y2 = 60 next_x1 = x1 * multiplier + offset next_y1 = y1", "mbr.getLowerRight()[i] component_does_enclose = left_value1 <= left_value2 and right_value1 >= right_value2 if component_does_enclose ==", "x: x[0], reverse = True) for priority_tagged_internal_entry in priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry", 
"== True: priority = -1 * curr_mbr_area item = curr_entry pair = (priority,item)", "for conflict x-tree returns entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry in matching_entries: #", "200, 100), point2) node2 = RTreeNode(None, [], True) entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2)", "in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None if have_resulting_second_entry_from_split == True:", "i % 4 == 1: upper_left = (20, 20) lower_right = (40, 40)", "= entries[ : ] low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[ :", "return self.M def isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries() <", "lower_right = (10, 10) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [],", "self.upper_left def getLowerRight(self): return self.lower_right def getArea(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight()", "root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry def setRootEntry(self, root_entry): self.root_entry = root_entry def", "0)) curr_mbr2 = RawMBR((50, 100, 0), (50, 100, 0), point3) curr_mbr2b = RawMBR((50,", "3.0) / denominator)) # for n = 5500 # k = int(round((10000 *", "do_overlap and comp_a1 <= comp_b2 and comp_a2 >= comp_b1 if do_overlap == False:", "entry_pq # raise Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry) return result_entry_list", "% 4 == 2: upper_left = (60, 60) lower_right = (80, 80) elif", "0 y1 = 0 x2 = 47 y2 = 60 next_x1 = x1", "0) lower_right = (10, 10) elif i % 4 == 1: upper_left =", "mbr_list): MBR.__init__(self, upper_left, lower_right) self.mbr_list = mbr_list def getMBRList(self): return self.mbr_list def 
isComposite(self):", "color_choice == 0: color = PythonMagick.Color(65535, 0, 0, 32767) elif color_choice == 1:", "prune using enclosure/containment # w.r.t. reference rectangle, add children to priority queue, #", "node.getParent() curr_entries = node.getEntries() entry = None \"\"\" if node.getParent() == None: entry", "10, 10)] # for i in xrange(10): # for i in xrange(4): \"\"\"", "lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap = True #", "# is not contained by reference rectangle; # check explicitly for this case", "2 * m + 2 + 1)] window_left_sizes = [x for x in", "xrange(base_mbr.getDimension()): components = [x[i] for x in points] min_comp_value = min(components) max_comp_value =", "\\ ((1081, 1056, 1020), (1708, 1075, 1542)), \\ ((358, 815, 372), (761, 1089,", "node.setEntry(entry) entries.append(entry) for i in xrange(1000): upper_left = (0, 0) lower_right = (10,", "node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() == None: # we are a root node if", "upper_left, lower_right, id_value): self.upper_left = upper_left self.lower_right = lower_right self.id_value = id_value def", "in mbr_list: upper_left = mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1, y1 = upper_left x2,", "None: continue else: return curr_node return None \"\"\" # a little stilted since", "assumption is necessary # to make strong running time estimates; the reason is", "== True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) # returns entries def doEnclosureQuery(self, mbr): partial_result", "= (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item = curr_entry # pair = (priority,item) #", "does_enclose = False break return does_enclose def isEqualTo(self, mbr): upper_left1 = self.getUpperLeft() lower_right1", "split\" return (RTree.NO_SPLIT, [node]) if node.isLeafNode() == True: # split just in 
case", "ended_with_split2, resulting_entries_from_split = adjust_result if ended_with_split2 == True: e, ee = resulting_entries_from_split l", "entry in entries[0 : 15]: for entry in entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent()", "getParent(self): return self.parent def getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def", "self.getRootEntry().getChild() curr_node = root depth = 0 while curr_node.isLeafNode() == False: curr_node =", "= self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) chosen_entry =", "entry.getChild() node.addEntry(entry) # needed this curr_node.setParent(node) \"\"\" entries = node.getEntries() mbr_list = [x.getMBR()", "return item def isEmpty(self): return len(self.heap) == 0 def peek(self): heap = self.heap", "+ [self.isRaw()]) return result def getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension =", "\"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry()", "result = self.findLeafHelper(entry, curr_node) if result == None: continue else: return curr_node return", "item = root_entry pair = (priority,item) heapq.heappush(heap,pair) # print entry_pq # raise Exception()", "reduce(lambda x, y: x * y, sides) return area @staticmethod def getEnlargedMBR(base_mbr, mbr):", "times involve n, which is number of actual rectangles # or leaves in", "to low-level # Q = list(set(Q)) Q.reverse() for curr_node in Q: curr_entry =", "takes O(log(n)) time on average for start rectangle # taken from set of", "tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print 
tree.toString() tree2 = RTree()", "intersection_volume = reduce(lambda x, y: x * y, sides) return intersection_volume def getMarginValue(self):", "print \"removing entry with mbr:\", entry.getMBR().toString() # print \"tree, currently:\", tree.toString() # tree2.delete(entry)", "entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for x in entry_group1] mbr_group2 = [x.getMBR() for", "from collections import deque # min-pq class PriorityQueue: def __init__(self): self.heap = []", "rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node): if node.isLeafNode() == True:", "return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries() < self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node):", "len(self.heap) import math def getDistance(point1, point2): x1, y1 = point1 x2, y2 =", "z1 = int(100 + random.randint(0, k) * 100) # x2 = int(x1 +", "= surface_area return margin def toString(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() result", "(60, 60) lower_right = (80, 80) elif i % 4 == 3: upper_left", "seconds (~1x slower for 1x growth; expected 1x slower) # n = 1000", "23.899 seconds (~55.96x slower for 55x growth; expected 317x slower) # n =", "overall_str_list = None if have_node_str == True: curr_leaf_status = \"-\" if node.isLeafNode() ==", "tree.toString() curr_root = tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110, 200, 100), point1) node1 =", "expected 664x slower) # n = 14500 # 170.053 seconds (~398x slower for", "self.getDimension() == 2: x1, y1 = upper_left x2, y2 = lower_right margin =", "upper_left = mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1, y1 = upper_left x2, y2 =", "ee = split_result resulting_entries_from_split = [e, ee] next_root = RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry())", "= 20000 
# k = int(round((14500 * math.log(14500, 2)) ** (1 / 3.0)", "54 sec. for pypy with m = 2 and M = 4 #", "% 3 color = None if color_choice == 0: color = PythonMagick.Color(65535, 0,", "match is found # finds one match if such a node exists #", "in conflict x-tree, # add actual rectangles to conflict x-tree, # use as", "int(round((10000 * math.log(10000, 2)) ** (1 / 3.0) / denominator)) # for n", "root = self.getRootEntry().getChild() curr_node = root depth = 0 while curr_node.isLeafNode() == False:", "entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if", "root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr,", "L # call algorithm condenseTree(L) # if the root has only one child", "5500 # k = int(round((10000 * math.log(10000, 2)) ** (1 / 3.0) /", "with m = 2 and M = 4 # n = 1,000 works", "raw mbr # leaf is a non-traditional leaf leaf_node = child_node.getParent() if entry", "upper_left2 = mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal = upper_left1 == upper_left2 and lower_right1", "self.getNumChildren() == 0 return is_leaf_node def addEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] =", "= self.getRootEntry().getChild() \"\"\" if root.getNumChildren() == 1: # shorten tree entries = root.getEntries()", "min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self, entries,", "candidate_tagged_area_values = [x for x in tagged_area_values if x[0] == min_area] candidate_entries =", "result1 = self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension = result1 mbr_collection1 = [x.getMBR() for", "i in xrange(base_mbr.getDimension()): components = [x[i] for x in points] min_comp_value = min(components)", "return 
(RTree.NO_SPLIT, [node]) \"\"\" follow = self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry, follow) split_status,", "node2.setEntry(entry2) if parent != None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node != self.getRootEntry().getChild():", ": 15]: for entry in entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent() == None: raise", "dimension) else: return (True, entry_collection3, entry_collection4, dimension) else: return (True, entry_collection1, entry_collection2, dimension)", "heapq.heappush(heap,pair) # print entry_pq # raise Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list,", "self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal = upper_left1 == upper_left2 and", "for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders)", "time2 = time.time() time_diff = time2 - time1 print \"time difference:\", time_diff, \"seconds\"", "290), (579, 950, 700)), \\ ((297, 196, 750), (1085, 718, 1259)), \\ ((808,", "that cover E.mbr # follow the corresponding subtrees unti lthe leaf L that", "[] self.condenseTreeHelper(leaf_node, Q) # Q is in order of low-level to high-level; #", "self.vec = vec self.id_value = id_value @staticmethod def toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight():", "def isLeafNode(self): # is_leaf_node = (self.getParent() == None and self.getNumChildren() == 0) or", "260, 1091), (1932, 332, 1133)), \\ ((262, 221, 872), (500, 279, 1521)), \\", "[] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def getNodesHelper(self, node, partial_result): partial_result.append(node) for curr_node in", "lower_right = self.getLowerRight() sides = [] for i in xrange(self.getDimension()): comp1 = upper_left[i]", 
"self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self, node, E_overall, entry): # prev_leaf_status = node.isLeafNode() prev_leaf_status", "entry != self.getRootEntry() else None if leaf_node == None: raise Exception(\"expected a node", "= RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7, (110, 200, 100), point7) node7", "False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree, node, resulting_entries_from_split,", "= CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea() area2 = mbr2.getArea() union_area", "= RTree() overlap_area_sum = sum([x.getArea() for x in mbr_list]) for curr_mbr in mbr_list:", "ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque = deque() # while len(heap) != 0: while", "= heapq.heappop(heap) elif len(internal_node_stack_deque) != 0: item = internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap)", "== True: # split just in case # print \"split\" return (RTree.SPLIT, [node])", "and M = 16 # these numbers are for upper-left's in (100, 10100)", "not contained by reference rectangle; # check explicitly for this case if reference_mbr.doesEnclose(mbr)", "[] reference_mbr = reference_entry.getMBR() root_entry = self.getRootEntry() root_node = root_entry.getChild() root_mbr = root_entry.getMBR()", "if result == None: continue else: return curr_node return None \"\"\" # a", "True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False: # if", "curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea()", "tuple(max_components) result_mbr = 
CompositeMBR(upper_left_point, lower_right_point, component_mbr_list) return result_mbr class HyperRectangle: def __init__(self, upper_left,", "reference_mbr.doesEnclose(mbr) == False: continue # kick out close descendant candidates on occasion, #", "2 def xtreeInsertHelper(self, entry, node): split_status = None next_mbr = None if True:", "self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry, follow) split_status, added_nodes = result curr_entry = node.getEntry()", "upper_left_points + lower_right_points min_components = [] max_components = [] for i in xrange(base_mbr.getDimension()):", "self.toDepthStringHelper(child, depth + 1) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list,", "low_comp_distributions, upper_comp_distributions = result[i] S_comp_value = 0 low_constituent_mbr_list_pairs = [([y.getMBR() for y in", "= [x.getMBR() in node.getEntries()] curr_x_tree = RTree() overlap_area_sum = sum([x.getArea() for x in", "= self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if node == None: return \"\"", "on 2016-08-23 to fix traditional/non-traditional isLeafNode() distinction # updated on 2016-08-25 to fix", "add to priority queue curr_node = curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw()", "if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString(), str(node)] for entry in entries: child =", "if node.isLeafNode() == True and node == self.getRootEntry().getChild(): node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node)", "int(round(denominator / denominator)) # for n = 100 # k = int(round((1000 *", "self.getLowerRight() result = str(list(upper_left + lower_right) + [self.isRaw()]) return result def getDimension(self): return", "self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry def setRootEntry(self, root_entry): 
self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self):", "= curr_entry mbr_list = [x.getMBR() for x in entries] mbr = entry.getMBR() tagged_enlargement_values", "= PriorityQueue() heap = [] # entry_pq.push(root_entry, priority) item = root_entry pair =", "if enclosing mbr exists in conflict x-tree continue if entry == ignore_entry: #", "chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis, M, m): result = RTree.rstarGenDistributions(entries, M, m) candidate_distributions", "acyclic and we have cliques # note that we don't necessarily need PythonMagick", "curr_entry mbr_list = [x.getMBR() for x in entries] mbr = entry.getMBR() tagged_enlargement_values =", "and reference_mbr.doesEnclose(curr_mbr) == False: continue # item = curr_entry # internal_node_stack_deque.appendleft(item) priority =", "200, 100), point8) node8 = RTreeNode(None, [], True) entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8)", "True or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False, None, None,", "= chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self,", "None curr_node = node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall,", "slower) # n = 20000 # 230.0411 seconds (~538x slower for 200x growth;", "self.getRootEntry().getChild() \"\"\" if root.getNumChildren() == 1: # shorten tree entries = root.getEntries() chosen_entry", "is number of actual rectangles or leaves; # assumes that rectangles are distinct", "node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5, (110, 200, 100), point5) node5 = RTreeNode(None,", "a leaf, # which is not the case when we initially 
insert parent", "upper_left = (x, y) lower_right = (x, y) # upper_left = ul_lr_pairs[i][0] #", "rstarChooseSplitIndex(entries, axis, M, m): result = RTree.rstarGenDistributions(entries, M, m) candidate_distributions = None candidate_distributions", "result_mbr = CompositeMBR(upper_left_point, lower_right_point, component_mbr_list) return result_mbr class HyperRectangle: def __init__(self, upper_left, lower_right,", "+ random.randint(0, k) * 100) # x2 = int(x1 + random.random() * 100)", "10), (6, 10, 10), (9, 10, 10), (6, 10, 10), (9, 10, 10),", "== 0 for x in self.getEntries()]) is_leaf_node = self.getNumChildren() == 0 return is_leaf_node", "self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if node == None: return \"\" entries = node.getEntries()", "upper_left_matches == True and lower_right_matches == True return result class CompositeMBR(MBR): def __init__(self,", "RTree() overlap_area_sum = sum([x.getArea() for x in mbr_list]) for curr_mbr in mbr_list: next_mbr", "= entries[ : ] upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[ :", "heapq.heappop(self.heap) return item def isEmpty(self): return len(self.heap) == 0 def peek(self): heap =", "x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(), x[1]) for x in mbr_pair_tagged_next_candidate_distributions]", "which for a well-formed r-tree, takes O(n * log(n)) time; # these times", "return False # returns entries def doContainmentQuery(self, mbr): partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(),", "getIDValue(self): return self.id_value import string class RTree: def __init__(self): root_node = RTreeNode(None, [],", "+ k for k in range(1, M - 2 * m + 2", "single-start-rectangle-based # close-descendant finding that takes O(log(n)) time on average # for start", "= min(overlap_values) 
matching_overlap_value_tagged_candidate_distributions = [x for x in overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value]", "for 145x growth; expected 1040x slower) # n = 20000 # 230.0411 seconds", "- y1) return margin surface_area = 0 for i in xrange(self.getDimension()): comp_1a =", "1 else: overlap_ratio = 0 else: overlap_ratio = overlap_area / (1.0 * union_area)", "return node.getParent() else: entries = node.getEntries() candidate_entries = None # if node.isLeafNode() ==", "== True: curr_leaf_status = \"-\" if node.isLeafNode() == False else \"+\" overall_str_list =", "to a point\") return mbr.getUpperLeft() def getVec(self): return self.vec def getComponent(self, d): return", "cliques # note that we don't necessarily need PythonMagick # note that nodes", "if curr_mbr_is_contained == True else 1) * curr_mbr_area # min-pq # priority =", "= (multiplier * x2 + offset, multiplier * y2 + offset) if depth", "Exception() conflict_x_tree.delete(matching_entry) # if node is a leaf node, it has an actual", "def setEntry(self, entry): self.entry = entry def isSuperNode(self): return self.is_supernode def setToSuperNode(self, is_supernode):", "self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def getNodesHelper(self, node, partial_result): partial_result.append(node) for curr_node in node.getChildren():", "= int(100 + random.randint(0, k) * 100) # z1 = int(100 + random.randint(0,", "next_x2 = x2 * multiplier + offset next_y2 = y2 * multiplier +", "node, Q): # demote super-node if necessary if node.isSuperNode() == True and node.getNumChildren()", "if node.isUnderfull() == False: # print \"not underfull\" parent = node.getParent() curr_entries =", "0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3 * 0.8 # offset =", "return result_mbr class HyperRectangle: def __init__(self, upper_left, lower_right, id_value): self.upper_left = upper_left self.lower_right", "have 
supernode demotion when size decreases to or below M # updated on", "self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node = root depth =", "\"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) image.write(\"tree.png\") def main(): point1 = (30, 100,", "lower_right def isRaw(self): return False def isComposite(self): return False def getUpperLeft(self): return self.upper_left", "setParent(self, node): self.parent = node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() == None and", "= self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries)", "= None if True: # if node.getNumChildren() == 0 and node == self.getRootEntry().getChild():", "None, is_supernode = False): self.parent = parent self.is_leaf = is_leaf self.m = 8", "100), point3) node3 = RTreeNode(None, [], True) entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3)", "False, False) else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry) l, ll, e, ee", "entries = [root_entry] RTreeEntry.draw(self, entries, image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier =", "(10, 10) elif i % 4 == 1: upper_left = (20, 20) lower_right", "n = 100 # 0.427 seconds (~1x slower for 1x growth; expected 1x", "# demote super-node if necessary if node.isSuperNode() == True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode():", "\"maximal disjointedness\" # and depth-first stack for internal nodes and # best-first priority", "tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr curr_node = curr_entry.getChild() result = 
self.findLeafHelper(entry,", "= raw_mbr.clone() next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry)", "self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw() == True: if", ">= m and (len(entries) - x) <= M and (len(entries) - x) >=", "# a little stilted since we don't need a O(log(n)) time operation #", "def getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self): return self.M def isFull(self): return self.getNumEntries() >=", "RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry)", "if node.getParent() == None: entry = tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node) children =", "for x in window_left_sizes if x <= M and x >= m and", "= [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] for keep_node in", "for adjustTree(); # fixed bug with parent pointers for xtreeInsert(); # have supernode", "tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list", "entry_collection2, dimension = result1 mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2 =", "self.split_history_root_dimension = dim def getParent(self): return self.parent def getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self,", "ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(),", "in entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2] 
mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2", "node3 = RTreeNode(None, [], True) entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 =", "RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) \"\"\" for i in xrange(10): upper_left = (20, 20)", "== False: does_enclose = False break return does_enclose def isEqualTo(self, mbr): upper_left1 =", "= True # assume that rectangles never have negative area for i in", "node): self.parent = node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() == None and self.getNumChildren()", "= MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(), x[1]) for x in tagged_mbr_list]", "1x growth; expected 1x slower) # n = 1000 # 1.1649 seconds (~2.72x", "leaf leaf_node = child_node.getParent() if entry != self.getRootEntry() else None if leaf_node ==", "100, 0) curr_mbr1 = RawMBR((100, 100, 0), (100, 100, 0), (100, 100, 0))", "= self.toNumChildrenStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \")", "x[1]) for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value", "RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def", "partial_result) def doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr,", "PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self, entries, image, 0) \"\"\"", "offset center_y = next_y + 
offset radius = 2 perimeter_x = next_x +", "have_node_str = True overall_str_list = None if is_root_node == False: overall_str_list = [node.getEntry().getMBR().toString()]", "** (1 / 3.0) / denominator)) # for n = 14500 # x1", "+ string.join(overall_str_list, \" \") + \")\" return overall_str def toString(self): root = self.getRootEntry().getChild()", "all entries of RN to find E.mbr # else: # RN is an", "bug fix for adjustTree(); # fixed bug with parent pointers for xtreeInsert(); #", "sides) return intersection_volume def getMarginValue(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() if self.getDimension()", "return self.upper_left def getLowerRight(self): return self.lower_right def getArea(self): upper_left = self.getUpperLeft() lower_right =", "self.getRootEntry().getChild() if is_root_node == True: have_node_str = True overall_str_list = None if is_root_node", "[x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True:", "def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict)", "\"mbr:\", curr_entry.getMBR().toString() # print \"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node, Q): # demote", "necessary if node.isSuperNode() == True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() ==", "= upper_left[i] comp2 = lower_right[i] side = comp2 - comp1 sides.append(side) area =", "rectangles for an r-tree; # takes O(n * log(n)) time at worst; #", "updated on 2016-11-16 to fix margin calculation # note that we assume rectangles", "to include associated entry in result; # if we made it this far,", "self.rstarSplitNode(leaf_node, entry) l, ll, e, ee = 
split_result adjust_result = RTree.rstarAdjustTree(self, l, [e,", "pair = (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr) == False and", "to priority queue entries = node.getEntries() priority_tagged_internal_entries = [] for curr_entry in entries:", "= lambda x: x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for", "928, 1028), (1762, 1795, 1309)), \\ ((225, 359, 290), (579, 950, 700)), \\", "O(log(n)) time on average for start rectangle # taken from set of actual", "candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0],", "leaf node # search all entries of RN to find E.mbr # else:", "= False): upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b =", "offset) if depth != 0: pass color_choice = depth % 3 color =", "entries def getRectangleCloseDescendants(self, reference_entry): # repeatedly pop nodes, prune using enclosure/containment # w.r.t.", "if root_mbr_is_contained == True else 1 second_priority_component = (-1 if root_mbr_is_contained == True", "# returns a node, which can be None if no match is found", "(self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l,", "tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print tree.toString()", 
"matching_overlap_value_tagged_candidate_distributions = [x for x in overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value] next_next_candidates =", "self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension = result1 mbr_collection1 = [x.getMBR() for x in", "15]: for entry in entries: # if len(tree.getNodes()) != 0: # print \"removing", "False return True def toNumChildrenString(self): root = self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node):", "raw_mbr.clone() next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif", "is_root_node == False: overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list", "x[0]) == True] for tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr curr_node =", "mbr_list] enlargement_values = [x[0] for x in tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values =", "curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component = 0 if", "next_y = multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x + offset center_y", "to find the entry containing node; just look at parent of entry child", "(1776, 1662, 455)), \\ ((596, 892, 131), (1543, 1838, 669)), \\ ((879, 319,", "change_y ** 2) return distance class RTreeNode: def __init__(self, parent, entries, is_leaf, entry", "= tree.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in curr_entries] mbr_list", "curr_entry # internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component) 
priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) #", "None lower_right = None \"\"\" if i % 4 == 0: upper_left =", "if (self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee)", "@staticmethod def rstarPreadjustTree(self, leaf_node): node = leaf_node parent = node.getParent() if parent !=", "M and x[0] >= m and x[1] <= M and x[1] >= m]", "keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] for keep_node", "number of actual rectangles # or leaves in r-tree; these times assume \"maximal", "curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self): return self.M def", "self.getDimension()): comp_2a = upper_left[j] comp_2b = lower_right[j] term2 = comp_2b - comp_2a term", "entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1) entry2 =", "= str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry in entries:", "split_status == RTree.SUPERNODE: pass # print \"no split\" return (RTree.NO_SPLIT, [node]) def rstarInsert(self,", "mbr_b) == False: return 0 else: upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b", "= parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node != self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root", "= int(random.randint(1, 100)) # y = 10 # z = 10 # lower_right", "# x-tree featuring enclosure and containment 
queries # dimension is implicit (determined using", "entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def", "pass MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self, node, entry): # we never split a", "curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node): node_list = [] self.getNodesHelper(node, node_list)", "PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self,", "[node]) def rstarInsert(self, entry): leaf_node = self.rstarChooseLeaf(entry) adjust_result = None if leaf_node.isFull() ==", "conflict x-tree, # add actual rectangles to conflict x-tree, # use as priority", "= [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def getNodesHelper(self, node, partial_result): partial_result.append(node) for curr_node", "if node.isSuperNode() == True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() == None:", "= 5500 # 23.899 seconds (~55.96x slower for 55x growth; expected 317x slower)", "chosen_distribution_pair def chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node): if node.isLeafNode()", "= sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs = S_comp_dict.items() min_S_value =", "= node E_overall = list(set(curr_node.getEntries() + [entry])) return self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self,", "tuple(min_components) lower_right_point = tuple(max_components) result_mbr = CompositeMBR(upper_left_point, lower_right_point, component_mbr_list) return result_mbr class 
HyperRectangle:", "= CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split,", "<= 1: # raise Exception() node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR()", "= partner_node.getEntries() partner_children = [x.getChild() for x in partner_entries] partner_mbr_list = [x.getMBR() for", "mbr exists in conflict x-tree continue if entry == ignore_entry: # ignore node", "= split_result adjust_result = RTree.rstarAdjustTree(self, l, [e, ee], True) ended_with_split2, resulting_entries_from_split = adjust_result", "parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root = RTreeNode(None, [entry1, entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root)", "x[0][1]), x[1]) for x in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for x in overlap_value_tagged_candidate_distributions]", "= 4 perimeter_x = next_x1 perimeter_y = next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x,", "len(tree.getNodes()) != 0: # print \"removing entry with mbr:\", entry.getMBR().toString() # print \"tree,", "[] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doContainmentQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw()", "1) * root_mbr_area # min-pq priority = (first_priority_component, second_priority_component) # priority = -1", "entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self): return self.M", "point3) curr_mbr2b = RawMBR((50, 50, 0), (100, 100, 0), HyperRectangle((50, 50, 0), (100,", "= 
[(MBR.getAreaEnlargement(x, mbr), x) for x in mbr_list] enlargement_values = [x[0] for x", "means maximal disjointedness # is not going to be good enough to cut", "= self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m)", "lower_right[0] margin = x2 - x1 return margin if self.getDimension() == 2: x1,", "entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix order def", "1: color = PythonMagick.Color(0, 0, 65535, 32767) elif color_choice == 2: color =", "else: entries = curr_entry.getChild().getEntries() for next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True:", "pass return (node1, node2, entry1, entry2) @staticmethod def rstarPreadjustTree(self, leaf_node): node = leaf_node", "47 y2 = 60 next_x1 = x1 * multiplier + offset next_y1 =", "[x[0].getMarginValue() + x[1].getMarginValue() for x in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value += low_margin_value_sum", "\\ ((945, 260, 1091), (1932, 332, 1133)), \\ ((262, 221, 872), (500, 279,", "low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value", "10, 10), (6, 10, 10), (9, 10, 10), (6, 10, 10), (9, 10,", "= [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for x", "entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result)", "upper_margin_value_sum = sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs = S_comp_dict.items() min_S_value", "next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) 
next_curr_node.setParent(node1) for curr_entry in entry_group2:", "y in x[0]], [y.getMBR() for y in x[1]]) for x in low_comp_distributions] low_mbr_pairs", "RawMBR((50, 100, 0), (50, 100, 0), point3) curr_mbr2b = RawMBR((50, 50, 0), (100,", "time at worst; # assumes that rectangles are distinct # return a list", "x2 = lower_right[0] margin = x2 - x1 return margin if self.getDimension() ==", "(110, 200, 100), point7) node7 = RTreeNode(None, [], True) entry7 = RTreeEntry(mbr7, node7)", "sampled) and assumed to be consistent # we never split a super-node #", "100, 0), point3) curr_mbr2b = RawMBR((50, 50, 0), (100, 100, 0), HyperRectangle((50, 50,", "vec self.id_value = id_value @staticmethod def toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted", "\" \") + \")\" return overall_str def toLeafStatusString(self): root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root)", "tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None if have_resulting_second_entry_from_split == True: first_entry, second_entry", "\\ ((596, 892, 131), (1543, 1838, 669)), \\ ((879, 319, 789), (1877, 744,", "(500, 279, 1521)), \\ ((332, 886, 493), (822, 1305, 1149)), \\ ((800, 709,", "0.427 seconds (~1x slower for 1x growth; expected 1x slower) # n =", "result = tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff = time2 - time1 print \"time", "# for a well-formed r-tree, this takes O(n * log(n)) time, # where", "if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result = self.findLeafHelper(entry, next_entry) if result == True:", "node): split_status = None next_mbr = None if True: # if node.getNumChildren() ==", "** (1 / 3.0) / denominator)) # for n = 20000 # k", "= None if have_node_str == True: curr_depth = \"-\" if node.getNumEntries() != 0", "pair_list] return items def getSize(self): return len(self.heap) import math def getDistance(point1, point2): x1,", 
"is_leaf_node def addEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def removeEntry(self, entry):", "seconds (~55.96x slower for 55x growth; expected 317x slower) # n = 10000", "id_value): self.vec = vec self.id_value = id_value @staticmethod def toPoint(mbr): if mbr.getUpperLeft() !=", "M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result =", "== False: curr_mbr = entry.getMBR() entries = self.getEntries() tagged_mbr_list = [(x.getMBR(), x) for", "we should add to conflict x-tree result_entry_list.append(entry) raw_mbr = mbr next_mbr = raw_mbr.clone()", "self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node): node_list = [] self.getNodesHelper(node, node_list) return node_list \"\"\"", "rstarSplitNodeHelper(self, node, E_overall, entry): # prev_leaf_status = node.isLeafNode() prev_leaf_status = None curr_node =", "- 1 + k for k in range(1, M - 2 * m", "parent.getChildren()): pass \"\"\" node1 = RTreeNode(parent, entry_group1, prev_leaf_status) node2 = RTreeNode(parent, entry_group2, prev_leaf_status)", "= (priority,item) # if curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair)", "for i in xrange(dimension): comp_a1 = upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1 = upper_left_b[i]", "and right_value1 >= right_value2 if component_does_enclose == False: does_enclose = False break return", "z) # lower_right = lower_rights[i] mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None,", "x <= M and x >= m and (len(entries) - x) <= M", "self.getRootEntry(), partial_result) return partial_result def doContainmentQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True:", "10), (3, 10, 10)] # for i in xrange(10): # for i in", "in mbr_list_pair_tagged_candidate_distributions] 
overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x in mbr_pair_tagged_candidate_distributions] overlap_values =", "(30, 100, 0) point2 = (40, 100, 0) point3 = (50, 100, 0)", "self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if node == None: return \"\" entries", "entry): # prev_leaf_status = node.isLeafNode() prev_leaf_status = None curr_node = node m =", "= RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem here tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3", "it has an actual rectangle # decide whether to include associated entry in", "denominator)) # for n = 5500 # k = int(round((10000 * math.log(10000, 2))", "depth + 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if", "if depth != curr_depth: return False else: return True else: for curr_node in", "== True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries()", "1521)), \\ ((798, 928, 1028), (1762, 1795, 1309)), \\ ((225, 359, 290), (579,", "= [] max_components = [] for i in xrange(component_mbr_list[0].getDimension()): components = [x[i] for", "lower_right, id_value): self.upper_left = upper_left self.lower_right = lower_right self.id_value = id_value def getUpperLeft(self):", "seconds (~2.72x slower for 10x growth; expected 33x slower) # n = 5500", "== None: # we are a root node if self.getRootEntry().getChild().getNumChildren() == 0: root_node", "(70, 100, 0) point6 = (80, 100, 0) point7 = (90, 100, 0)", "and M = 4 # n = 1,000 works in 2.996 sec. 
for", "pass color_choice = depth % 3 color = None if color_choice == 0:", "= RawMBR(point1, (110, 200, 100), point1) node1 = RTreeNode(None, [], True) entry1 =", "dim): self.split_history_root_dimension = dim def getParent(self): return self.parent def getEntries(self): return (self.child_to_entry_dict).values() def", "return (RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() for x in added_nodes]) # print", "return candidate_entries @staticmethod def rstarGenDistributions(entries, M, m): result_list = [] if len(entries) >", "(multiplier * x2 + offset, multiplier * y2 + offset) if depth !=", "100 # k = int(round((1000 * math.log(1000, 2)) ** (1 / 3.0) /", "= tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff = time2 - time1 print \"time difference:\",", "second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True and is_first_call_after_first_pass !=", "comp_a2 >= comp_b1 if do_overlap == False: break return do_overlap @staticmethod def findOverlapArea(mbr_a,", "self.upper_left = upper_left self.lower_right = lower_right def isRaw(self): return False def isComposite(self): return", "0: # print \"removing entry with mbr:\", entry.getMBR().toString() # print \"tree, currently:\", tree.toString()", "+ string.join(overall_str_list, \" \") + \")\" return overall_str def toDepthString(self): root = self.getRootEntry().getChild()", ": 4]: # print \"supernodes:\", [x for x in tree.getNodes() if x.isSuperNode() ==", "next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for y", "= 0.2 def xtreeSplitNode(self, node, entry): # we never split a super-node if", "entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0]", "= RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry 
in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in entry_group2:", "result class CompositeMBR(MBR): def __init__(self, upper_left, lower_right, mbr_list): MBR.__init__(self, upper_left, lower_right) self.mbr_list =", "curr_entry, partial_result) def doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self,", "curr_x_tree = RTree() overlap_area_sum = sum([x.getArea() for x in mbr_list]) for curr_mbr in", "node, partial_result): partial_result.append(node) for curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node): node_list", "doesMatch(self, mbr): upper_left_matches = self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches = self.getLowerRight() == mbr.getLowerRight() result", "adjustTree(); # stop at root instead of non-existent parent of root; # also,", "node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7, (110, 200, 100), point7) node7 = RTreeNode(None, [],", "(x2, y2, z2) upper_left = (x, y) lower_right = (x, y) # upper_left", "zero entries after removing this entry, this should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) #", "== False: continue # item = curr_entry # internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component)", "= split_result resulting_entries_from_split = [e, ee] next_root = RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root)", "x) for x in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in", "0: raise Exception() if self.getDimension() == 1: x1 = upper_left[0] x2 = lower_right[0]", "0.2 def xtreeSplitNode(self, node, entry): # we never split a super-node if node.isSuperNode()", "node # find all entries of RN 
that cover E.mbr # follow the", "is_leaf, entry = None, split_history_root_dimension = None, is_supernode = False): self.parent = parent", "if i % 4 == 0: upper_left = (0, 0) lower_right = (10,", "continue else: return curr_node return None \"\"\" # a little stilted since we", "y1 = upper_left x2, y2 = lower_right margin = 2 * (x2 -", "findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b) == False: return 0 else: upper_left_a = mbr_a.getUpperLeft()", "node, entry): if node.getSplitHistoryRootDimension() == None: return (None, None, None, True) else: m", "queue, # ignore if contained rectangle is contained by a rectangle in conflict", "the root # set as new root its only child pass def condenseTree(self,", "0 return is_leaf_node def addEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def", "# lower_right = (x, y, z) # lower_right = lower_rights[i] mbr = RawMBR(upper_left,", "have_resulting_second_entry_from_split): if node.getParent() == None: entry = tree.getRootEntry() curr_entries = entry.getChild().getEntries() children =", "= root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component =", "curr_node = curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area", "= [x for x in combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value] next_next_candidates = [x[1]", "\"white\") draw = ImageDraw.Draw(im) root = self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\"", "str(depth) overall_str_list = [curr_depth] else: overall_str_list = [] for entry in entries: child", "\"\" entries = node.getEntries() children = node.getChildren() have_node_str = True is_root_node = node", "[node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() 
for x in added_nodes]) # print \"supernode #1\"", "\\ ((358, 815, 372), (761, 1089, 594)), \\ ((294, 238, 1036), (785, 378,", "/ denominator)) # for n = 14500 # x1 = int(100 + random.randint(0,", "def isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries() < self.getMinimumNumEntriesPerNode() def", "associated mbr is not contained within reference mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True:", "entry.getChild().getParent() mbr = entry.getMBR() location = Point.toPoint(mbr) x, y = location multiplier =", "x in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs] upper_margin_values =", "upper_left_points + lower_right_points min_components = [] max_components = [] for i in xrange(component_mbr_list[0].getDimension()):", "result = str(list(upper_left + lower_right) + [self.isRaw()]) return result def getDimension(self): return len(self.getUpperLeft())", "mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry = entries[i] curr_mbr = curr_entry.getMBR()", "entry, partial_result, without_borders): if entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True:", "def delete(self, entry): # print \"hello\" did_find_leaf = self.findLeaf(entry) child_node = entry.getChild() #", "750), (1085, 718, 1259)), \\ ((808, 926, 151), (889, 1755, 320)), \\ ((945,", "self.heap items = [x[1] for x in pair_list] return items def getSize(self): return", "self.heap pair = heap[0] result = pair return result def toList(self): pair_list =", "3.428 sec. 
for pypy with m = 8 and M = 16 #", "[entry], False) else: split_result = self.rstarSplitNode(leaf_node, entry) l, ll, e, ee = split_result", "or (node.getParent() != None and node in node.getParent().getChildren())) == False else \"+\" overall_str_list", "multiplier * y1 + offset) next_x2, next_y2 = (multiplier * x2 + offset,", "RTree.rstarGenDistributions(entries, M, m) S_comp_dict = {} for i in xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions =", "in combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value] next_next_candidates = [x[1] for x in matching_combined_area_tagged_next_candidate_distributions]", "* term1 * term2 surface_area += term margin = surface_area return margin def", "time time1 = time.time() result = tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff = time2", "[x.getEntry() for x in added_nodes]) # print \"supernode #1\" return (RTree.SUPERNODE, [node]) elif", "== 0: # if node.isNonTraditionalLeafNode() == True: if node.isLeafNode() == True and node", "return 0 else: upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b", "\"\"\" follow = self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry, follow) split_status, added_nodes = result", "curr_entry: return True else: return False else: entries = curr_entry.getChild().getEntries() for next_entry in", "\"\"\" ul_lr_pairs = [((797, 989, 602), (910, 1248, 1035)), \\ ((920, 974, 724),", "xrange(n): upper_left = None lower_right = None \"\"\" if i % 4 ==", "(priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item = curr_entry # pair = (priority,item) # if", "\"right\" comp_a2 = max(upper_left_a[i], lower_right_a[i]) # b \"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i]) #", "never has a raw mbr # leaf is a non-traditional leaf leaf_node =", "the corresponding 
subtrees unti lthe leaf L that contains E is found #", "conflict x-tree result_entry_list.append(entry) raw_mbr = mbr next_mbr = raw_mbr.clone() next_node = RTreeNode(None, [],", "(100 * math.log(100, 2)) ** (1 / 3.0) k = 1 # k", "(1293, 619, 1521)), \\ ((798, 928, 1028), (1762, 1795, 1309)), \\ ((225, 359,", "we increase n and do not increase domains and # high inter-group overlap", "# raise Exception() return (False, None, None, None) dimension = None result1 =", "entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry", "root has only one child (and it is not a leaf) # remove", "mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change =", "mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea()", "0: parent = entry.getChild().getParent() mbr = entry.getMBR() location = Point.toPoint(mbr) x, y =", "color_choice == 2: color = PythonMagick.Color(0, 65535, 0, 32767) if upper_left == lower_right:", "(~55.96x slower for 55x growth; expected 317x slower) # n = 10000 #", "root its only child pass def condenseTree(self, leaf_node): Q = [] self.condenseTreeHelper(leaf_node, Q)", "\")\" return overall_str def toString(self): root = self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self, node):", "return node.getParent() else: entries = node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) !=", "RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in entry_group2: curr_entry.getChild().setParent(node2)", "overlap_area ovelap_ratio = None if 
union_area == 0: if mbr1.isEqualTo(mbr2) == True: overlap_ratio", "self.xtreeSupernodeInsert(node, [x.getEntry() for x in added_nodes]) # print \"supernode #1\" return (RTree.SUPERNODE, [node])", "internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\", conflict_x_tree.toString() # for a well-formed r-tree, this", "finding; the assumption is necessary # to make strong running time estimates; the", "False) else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry) l, ll, e, ee =", "upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap", "distinct # return a list of entries def getRectangleCloseDescendants(self, reference_entry): # repeatedly pop", "!= 0: # print \"removing entry with mbr:\", entry.getMBR().toString() # print \"tree, currently:\",", "== True and lower_right_matches == True return result class CompositeMBR(MBR): def __init__(self, upper_left,", "in tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values = [x for x in tagged_enlargement_values if", "always point to same entries # unless we explicitly create new entries, #", "= reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component = 0 if root_mbr_is_contained == True else", "\"not underfull\" parent = node.getParent() curr_entries = node.getEntries() entry = parent.retrieveEntryForChild(node) children =", "[] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()] # overall_str_list = [] if node.getNumChildren()", "= RawMBR(upper_left, lower_right, point) return result_mbr def getContainedItem(self): return self.contained_item def getMBRList(self): return", "x[0]], [y.getMBR() for y in x[1]]) for x in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]),", "min_S_value] 
chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis,", "+ offset, multiplier * y1 + offset) next_x2, next_y2 = (multiplier * x2", "= 2 * term1 * term2 surface_area += term margin = surface_area return", "split_result if was_successful == True: mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2", "# assume that rectangles never have negative area for i in xrange(mbr_a.getDimension()): #", "chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis, M,", "mbr.getLowerRight(): raise Exception(\"attempted to turn a non-point mbr to a point\") return mbr.getUpperLeft()", "+ change_y ** 2) return distance class RTreeNode: def __init__(self, parent, entries, is_leaf,", "# n = 5500 # 23.899 seconds (~55.96x slower for 55x growth; expected", "= self.getUpperLeft() lower_right = self.getLowerRight() contained_item = self.getContainedItem() mbr = RawMBR(upper_left, lower_right, contained_item)", "def getParent(self): return self.parent def getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node]", "def getVec(self): return self.vec def getComponent(self, d): return self.getVec()[d] def getIDValue(self): return self.id_value", "20000 n = 1000 import math for i in xrange(n): upper_left = None", "two works import sys # import PythonMagick import heapq from collections import deque", "self.getEntries()]) is_leaf_node = self.getNumChildren() == 0 return is_leaf_node def addEntry(self, entry): curr_child =", "curr_entry) if result == True: return True return False # returns entries def", "self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image", 
"\"underfull\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode() for this, as internal", "overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry", "(~538x slower for 200x growth; expected 1528x slower) # n = 2000 #", "entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1: next_curr_node =", "200, 100), point7) node7 = RTreeNode(None, [], True) entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7)", "if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries:", "on average for start rectangle # taken from set of actual rectangles for", "margin = x2 - x1 return margin if self.getDimension() == 2: x1, y1", "x-tree, # use as priority (prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0: return []", "for y in x[0]], [y.getMBR() for y in x[1]]), x) for x in", "y2 = lower_right multiplier = 1 / (1.0 * 6.5) * 0.8 offset", "for i in xrange(n): upper_left = None lower_right = None \"\"\" if i", "point5) node5 = RTreeNode(None, [], True) entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6", "contained_item def isRaw(self): return True @staticmethod def makeMBRFromPoint(point): upper_left = point lower_right =", "= [x.getLowerRight() for x in component_mbr_list] points = upper_left_points + lower_right_points min_components =", "getMBR(self): return self.mbr def setMBR(self, mbr): self.mbr = mbr def getChild(self): return self.child", "entry.getChild() child_str = self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list,", "= {} for i in xrange(entries[0].getMBR().getDimension()): 
low_comp_distributions, upper_comp_distributions = result[i] S_comp_value = 0", "return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass):", "works in 2.996 sec. for pypy with m = 2 and M =", "min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x for x in overlap_value_tagged_candidate_distributions if x[0] ==", "724), (1802, 1524, 1378)), \\ ((911, 953, 196), (1776, 1662, 455)), \\ ((596,", "dim def getParent(self): return self.parent def getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return", "== True and is_first_call_after_first_pass != True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children", "+ lower_right_points min_components = [] max_components = [] for i in xrange(base_mbr.getDimension()): components", "= (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr)", "to high-level; # wish to insert using order of high-level to low-level #", "= 0 low_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for y in", "M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result parent", "case if reference_mbr.doesEnclose(mbr) == False: continue # kick out close descendant candidates on", "node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1,", "100) # z2 = int(z1 + random.random() * 100) x = random.randint(0, 10000)", "tagged_mbr_list] area_values = [x[0] for x in tagged_area_values] min_area = min(area_values) candidate_tagged_area_values =", "[x[0] 
for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for x", "j in xrange(len(window_left_sizes))] upper_sorted_entries = entries[ : ] upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i])", "depth + 1) class MBR: def __init__(self, upper_left, lower_right): self.upper_left = upper_left self.lower_right", "RTreeNode(None, [], True) entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5, (110,", "= self.getDimension() does_enclose = True for i in xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2", "in candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self, entries, entry): mbr = entry.getMBR() tagged_mbr_list =", "have_node_str = True overall_str_list = None if have_node_str == True: curr_leaf_status = \"-\"", "for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for x in", "= reduce(lambda x, y: x * y, sides) return intersection_volume def getMarginValue(self): upper_left", "815, 372), (761, 1089, 594)), \\ ((294, 238, 1036), (785, 378, 1963)), \\", "= int(z1 + random.random() * 100) x = random.randint(0, 10000) y = random.randint(0,", "0), 1)) tree = RTree() print tree.toString() curr_root = tree.getRootEntry().getChild() mbr1 = RawMBR(point1,", "made it this far, we should add to conflict x-tree result_entry_list.append(entry) raw_mbr =", "prev_leaf_status = None curr_node = node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis", "if result == True: return result return False def delete(self, entry): # print", "mbr_list def getMBRList(self): return self.mbr_list def isComposite(self): return True @staticmethod def 
makeMBR(component_mbr_list): upper_left_points", "= mbr_a.getDimension() sides = [] for i in xrange(dimension): comp_a1 = upper_left_a[i] comp_a2", "return True @staticmethod def makeMBRFromPoint(point): upper_left = point lower_right = point result_mbr =", "[x for x in window_left_sizes if x <= M and x >= m", "x[0] <= M and x[0] >= m and x[1] <= M and x[1]", "# 0.427 seconds (~1x slower for 1x growth; expected 1x slower) # n", "MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) # returns entries def doEnclosureQuery(self,", "image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry() entries = [root_entry] RTreeEntry.draw(self, entries, image,", "result = RTree.rstarGenDistributions(entries, M, m) candidate_distributions = None candidate_distributions = result[axis][0] + result[axis][1]", "(1802, 1524, 1378)), \\ ((911, 953, 196), (1776, 1662, 455)), \\ ((596, 892,", "RTreeNode(None, [], True) entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem here tree.insert(entry8) print", "entry in entries: child = entry.getChild() child_str = self.toNumChildrenStringHelper(child) curr_str = child_str overall_str_list.append(curr_str)", "def __init__(self, vec, id_value): self.vec = vec self.id_value = id_value @staticmethod def toPoint(mbr):", "tree.insert(entry7) mbr8 = RawMBR(point8, (110, 200, 100), point8) node8 = RTreeNode(None, [], True)", "{} for start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry in start_rectangle_entries: close_descendant_entries", "self.lower_right = lower_right self.id_value = id_value def getUpperLeft(self): return self.upper_left def getLowerRight(self): return", "for next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result = self.findLeafHelper(entry, next_entry)", "returns 
entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry in matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry)", "item = curr_entry pair = (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr)", "RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) \"\"\" for i in", "[(MBR.getAreaEnlargement(x, mbr), x) for x in mbr_list] enlargement_values = [x[0] for x in", "= [] for i in xrange(self.getDimension()): comp1 = upper_left[i] comp2 = lower_right[i] side", "lower_right_points min_components = [] max_components = [] for i in xrange(base_mbr.getDimension()): components =", "x2 - x1 change_y = y2 - y1 distance = math.sqrt(change_x ** 2", "isEqualTo(self, mbr): upper_left1 = self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2 =", "None if is_root_node == False: overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(), str(node)]", "M and x >= m and (len(entries) - x) <= M and (len(entries)", "entry.setMBR(tight_overall_mbr) return else: # raise Exception() # print \"decision point\" \"\"\" if node.isSuperNode()", "keep_node in keep_nodes: Q.append(keep_node) # only makes sense to speak of modifying mbr", "root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node = root depth = 0 while", "\"hello\" did_find_leaf = self.findLeaf(entry) child_node = entry.getChild() # root node never has a", "entry = tree.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in curr_entries]", "60) lower_right = (80, 80) elif i % 4 == 3: upper_left =", "node): node_list = [] self.getNodesHelper(node, node_list) return node_list \"\"\" def getUnionArea(self): pass \"\"\"", "[] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), 
partial_result) return partial_result def doEnclosureQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw()", "(priority,item) = heapq.heappop(heap) entry = item node = entry.getChild() mbr = entry.getMBR() if", "+= term margin = surface_area return margin def toString(self): upper_left = self.getUpperLeft() lower_right", "next_result = (entry_group1, entry_group2, axis) return next_result def xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension()", "= next_x + offset center_y = next_y + offset radius = 2 perimeter_x", "# for i in xrange(10): # for i in xrange(4): \"\"\" ul_lr_pairs =", "candidate_tagged_area_values] return candidate_entries @staticmethod def rstarGenDistributions(entries, M, m): result_list = [] if len(entries)", "= [x for x in overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value] next_next_candidates = [x[1]", "doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if", "def rstarGenDistributions(entries, M, m): result_list = [] if len(entries) > (M + 1):", "None candidate_distributions = result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]],", "entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry in entry_group2: next_curr_node = curr_entry.getChild() if curr_entry !=", "[x for x in self.getNodes() if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries = [x.getEntry() for", "root = self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"),", "curr_root = tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110, 200, 100), point1) node1 = RTreeNode(None,", "involve n, which is number of actual rectangles # or leaves in r-tree;", "k in range(1, M - 2 
* m + 2 + 1)] window_left_sizes", "just in case # print \"split\" return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() == True:", "curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q) return # not tested # returns", "root instead of non-existent parent of root; # also, we implement delete(); note", "does intersection query def doOverlapQuery(self, mbr, without_borders = False): partial_result = [] self.doOverlapQueryHelper(mbr,", "> (M + 1): raise Exception() window_left_sizes = [m - 1 + k", "only makes sense to speak of modifying mbr if we plan on keeping", "node.getParent().getChildren())) == False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list = [] for", "next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print \"split #2\" return", "entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall,", "upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values)", "(120, 120) \"\"\" denominator = (100 * math.log(100, 2)) ** (1 / 3.0)", "ee] next_root = RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO", "in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) == True] for tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry", "node = RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) for i", "* 0.2) / 2 next_x1, 
next_y1 = (multiplier * x1 + offset, multiplier", "follow = self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry, follow) split_status, added_nodes = result curr_entry", "resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() == None: entry", "= CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return", "Q.append(node) # raise Exception() if node.getNumChildren() <= 1: # raise Exception() node.setToSuperNode(False) elif", "[e, ee], True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(),", "self.getUpperLeft() lower_right = self.getLowerRight() if self.getDimension() == 0: raise Exception() if self.getDimension() ==", "else: split_result = self.rstarSplitNode(leaf_node, entry) l, ll, e, ee = split_result adjust_result =", "(110, 200, 100), point4) node4 = RTreeNode(None, [], True) entry4 = RTreeEntry(mbr4, node4)", "right_value2 if component_does_enclose == False: does_enclose = False break return does_enclose def isEqualTo(self,", "if x[0] == min_combined_area_value] next_next_candidates = [x[1] for x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair =", "if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child", "entry in entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent() == None: raise Exception() \"\"\" #", "parent != None and (node in parent.getChildren()): pass \"\"\" node1 = RTreeNode(parent, entry_group1,", "def __init__(self, upper_left, lower_right, contained_item): MBR.__init__(self, upper_left, lower_right) 
self.contained_item = contained_item def isRaw(self):", "node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no split\"", "window_left_sizes[i]) for i in range(len(window_left_sizes))] window_size_pairs = [x for x in window_size_pairs if", "def setRootEntry(self, root_entry): self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node =", "l = e.getChild() ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee)", "56.672 sec. for pypy with m = 8 and M = 16 #", "def getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr, mbr] upper_left_points = [x.getUpperLeft() for x in", "in entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1", "= CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q) return # not tested # returns entries #", "if parent != None and (node in parent.getChildren()): pass \"\"\" node1 = RTreeNode(parent,", "[m - 1 + k for k in range(1, M - 2 *", "0: if mbr1.isEqualTo(mbr2) == True: overlap_ratio = 1 else: overlap_ratio = 0 else:", "= min(enlargement_values) candidate_tagged_enlargement_values = [x for x in tagged_enlargement_values if x[0] == min_enlargement_value]", "[entry1, entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return (node1, node2, entry1, entry2)", "def xtreeInsert(self, entry): # print \"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE", "= self.getContainedItem() mbr = RawMBR(upper_left, lower_right, contained_item) return 
mbr def doesMatch(self, mbr): upper_left_matches", "heap, point, TopicKNearest, k): def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree = RTree()", "= heapq.heappop(heap) entry = item node = entry.getChild() mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr)", "2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry], False, False)", "2)) ** (1 / 3.0) / denominator)) # for n = 1000 #", "\") + \")\" return overall_str def toString(self): root = self.getRootEntry().getChild() return self.toStringHelper(root) def", "RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry) \"\"\" # for entry in entries[0 : 4]:", "= ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1] # x = int(random.randint(1, 100)) # y", "== True: if node == self.getRootEntry().getChild(): return node else: return node.getParent() else: entries", "self.heap = [] def push(self, item, priority): pair = (priority,item) heapq.heappush(self.heap,pair) def pop(self):", "adjust_result = RTree.rstarAdjustTree(self, l, [e, ee], True) ended_with_split2, resulting_entries_from_split = adjust_result if ended_with_split2", "order of low-level to high-level; # wish to insert using order of high-level", "super-node if necessary if node.isSuperNode() == True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if", "im = Image.new(\"RGB\", (768, 768), \"white\") draw = ImageDraw.Draw(im) root = self.getRoot() root.draw(self,", "be found for a delete\") # if parent has zero entries after removing", "RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: split_result = tree.rstarSplitNode(parent, partner_entry) l, ll, e, ee", "True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def 
doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result", "# do_overlap = True if without_borders == True: do_overlap = do_overlap and comp_a1", "entries = [] # lower_rights = [(3, 10, 10), (1, 10, 10), (8,", "entries after removing this entry, this should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root", "372), (761, 1089, 594)), \\ ((294, 238, 1036), (785, 378, 1963)), \\ ((803,", "tree.insert(entry3) mbr4 = RawMBR(point4, (110, 200, 100), point4) node4 = RTreeNode(None, [], True)", "= 1 else: overlap_ratio = 0 else: overlap_ratio = overlap_area / (1.0 *", "True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() == None: # we are", "never split a super-node # updated on 2016-08-23 to fix traditional/non-traditional isLeafNode() distinction", "(20, 20) lower_right = (40, 40) elif i % 4 == 2: upper_left", "continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore node if enclosing mbr exists in", "if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries:", "100, 0) point4 = (60, 100, 0) point5 = (70, 100, 0) point6", "self.getUpperLeft() lower_right = self.getLowerRight() contained_item = self.getContainedItem() mbr = RawMBR(upper_left, lower_right, contained_item) return", "0, 32767) elif color_choice == 1: color = PythonMagick.Color(0, 0, 65535, 32767) elif", "if node.isSuperNode() == True: # print \"supernode encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node)", "pass # print tree.toString() result = tree.getRectangleCloseDescendants(entry8) print result result = tree.getAllRectangleCloseAncestors() print", "result_mbr_list = base_mbr.getMBRList() + [mbr] mbr = CompositeMBR(upper_left_point, lower_right_point, 
result_mbr_list) return mbr @staticmethod", "x.isSuperNode() == True], tree.getRootEntry().getChild() # tree2.draw() print len(tree2.getNodes()) import time time1 = time.time()", "mbr_list = [x.getMBR() for x in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self,", "node): entries = node.getEntries() candidate_entries = None # if node.isLeafNode() == True: candidate_entries", "((294, 238, 1036), (785, 378, 1963)), \\ ((803, 1054, 307), (1776, 1597, 501)),", "= None result1 = self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension = result1 mbr_collection1 =", "node.isUnderfull() == True: \"\"\" if node.isUnderfull() == True: # print \"underfull\" parent =", "we never split a super-node if node.isSuperNode() == True: # raise Exception() return", "RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem here tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 =", "rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() if __name__ ==", "base_mbr_area = base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change = enlarged_mbr_area", "== 0: upper_left = (0, 0) lower_right = (10, 10) elif i %", "len(internal_node_stack_deque) != 0 or len(heap) != 0: # entry = entry_pq.pop() item =", "k = int(round((14500 * math.log(14500, 2)) ** (1 / 3.0) / denominator)) #", "for x in component_mbr_list] lower_right_points = [x.getLowerRight() for x in component_mbr_list] points =", "0 if curr_mbr_is_contained == True else 1 second_priority_component = (-1 if curr_mbr_is_contained ==", "def rstarChooseSplitAxis(entries, M, m): result = RTree.rstarGenDistributions(entries, M, m) S_comp_dict = {} for", "True: # could have a safe path to a leaf where the leaf", "curr_entry = tagged_overlapped_mbr curr_node = 
curr_entry.getChild() result = self.findLeafHelper(entry, curr_node) if result ==", "717, 1487)), \\ ((660, 268, 962), (1293, 619, 1521)), \\ ((798, 928, 1028),", "<reponame>bzliu94/algorithms<gh_stars>0 # 2016-08-21 # x-tree featuring enclosure and containment queries # dimension is", "ignore node if enclosing mbr exists in conflict x-tree continue if entry ==", "if is_root_node == False: overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else:", "!= 0: item = internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap) entry = item node", "521), (1314, 717, 1487)), \\ ((660, 268, 962), (1293, 619, 1521)), \\ ((798,", "area_change = enlarged_mbr_area - base_mbr_area return area_change @staticmethod def doOverlap(mbr_a, mbr_b, without_borders =", "False): partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders) return partial_result def doOverlapQueryHelper(self, mbr,", "entry in entries: curr_node = entry.getChild() node.addEntry(entry) # needed this curr_node.setParent(node) \"\"\" entries", "center_y = next_y + offset radius = 2 perimeter_x = next_x + offset", "leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry], False) else: split_result = self.rstarSplitNode(leaf_node, entry)", "node.getParent() else: entries = node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1:", "= mbr next_mbr = raw_mbr.clone() next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr,", "comp_b2) - max(comp_a1, comp_b1)) sides.append(side) intersection_volume = reduce(lambda x, y: x * y,", "added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status == RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode():", 
"else: entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr,", "node.getParent() == None: # we are a root node if self.getRootEntry().getChild().getNumChildren() == 0:", "partner_entry = second_entry if have_resulting_second_entry_from_split == True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries()", "if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0] chosen_child =", "upper_left, lower_right, contained_item): MBR.__init__(self, upper_left, lower_right) self.contained_item = contained_item def isRaw(self): return True", "entries] tagged_overlapped_mbr_list = [x for x in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) == True]", "upper_left = ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1] # x = int(random.randint(1, 100)) #", "268, 962), (1293, 619, 1521)), \\ ((798, 928, 1028), (1762, 1795, 1309)), \\", "color_choice = depth % 3 color = None if color_choice == 0: color", "RTree: def __init__(self): root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None)", "mbr1 = RawMBR(point1, (110, 200, 100), point1) node1 = RTreeNode(None, [], True) entry1", "= RTree.rstarGenDistributions(entries, M, m) candidate_distributions = None candidate_distributions = result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions", "n is number of actual rectangles or leaves; # assumes that rectangles are", "partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: split_result = tree.rstarSplitNode(parent, partner_entry) l, ll,", "for k in range(1, M - 2 * m + 2 + 1)]", "= self.getEntries() tagged_mbr_list = [(x.getMBR(), x) for x in entries] tagged_overlapped_mbr_list = [x", "# updated on 2016-11-16 to fix margin calculation # note that we assume", "return 
else: entry = self.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x", "parent has zero entries after removing this entry, this should be okay leaf_node.removeEntry(entry)", "3.0) / denominator)) # for n = 1000 # k = int(round((5500 *", "= curr_entry # pair = (priority,item) # if curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr)", "entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print", "0) point6 = (80, 100, 0) point7 = (90, 100, 0) point8 =", "a point\") return mbr.getUpperLeft() def getVec(self): return self.vec def getComponent(self, d): return self.getVec()[d]", "for entry in entries[0 : 4]: # print \"supernodes:\", [x for x in", "curr_entries = node.getEntries() entry = None if node.getParent() == None: entry = tree.getRootEntry()", "root node if self.getRootEntry().getChild().getNumChildren() == 0: root_node = RTreeNode(None, [], True) root_mbr =", "print tree.toString() # tree.delete(entry8) # tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6)", "\"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toLeafStatusString(self): root =", "[] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString(), str(node)] for entry in entries: child", "return (True, entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self, node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M", "** (1 / 3.0) / denominator)) # for n = 5500 # k", "high-level; # wish to insert using order of high-level to low-level # Q", "and comp_a2 >= comp_b1 if do_overlap == False: break return do_overlap @staticmethod def", "next_x + offset perimeter_y = 
next_y + offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x,", "we initially insert parent = node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent,", "# problem here tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100, 0),", "# use as priority (prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0: return [] reference_mbr", "= split_result if was_successful == True: mbr_collection1 = [x.getMBR() for x in entry_collection1]", "next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False: # if we made it this far,", "node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4, (110, 200, 100), point4) node4 = RTreeNode(None,", "= RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) for i in", "image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if", "curr_depth + 1) if result == False: return False return True def toNumChildrenString(self):", "curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list = [x.getMBR() for x in entries]", "else: entries = entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True:", "reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component = 0 if root_mbr_is_contained == True else 1", "2016-11-03 to re-structure and modify adjustTree(); # stop at root instead of non-existent", "isLeafNode() for this, as internal nodes can temporarily look like leaf nodes #", "== True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def 
doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return", "entry.draw(tree, entries, image, depth + 1) class MBR: def __init__(self, upper_left, lower_right): self.upper_left", "= \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toEntriesArePresentString(self): root", "m and (len(entries) - x) <= M and (len(entries) - x) >= m]", "tree.toString() # for entry in entries[0 : 4]: # print \"supernodes:\", [x for", "1521)), \\ ((332, 886, 493), (822, 1305, 1149)), \\ ((800, 709, 871), (1390,", "nodes # keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True]", "True: do_overlap = do_overlap and comp_a1 < comp_b2 and comp_a2 > comp_b1 else:", "if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in", "toEntriesArePresentString(self): root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if node == None:", "= mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension =", "== True] for keep_node in keep_nodes: Q.append(keep_node) # only makes sense to speak", "\" \") + \")\" return overall_str def toDepthString(self): root = self.getRootEntry().getChild() return self.toDepthStringHelper(root,", "def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node == None: return (False, [])", "x1 = 0 y1 = 0 x2 = 47 y2 = 60 next_x1", "[root_entry] RTreeEntry.draw(self, entries, image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3 *", "True: # raise Exception() return (False, None, None, None) dimension = None result1", 
"curr_leaf_status = \"-\" if (node.getParent() == None or (node.getParent() != None and node", "entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list = [x.getMBR() for x in", "def doContainmentQuery(self, mbr): partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doContainmentQueryHelper(self,", "not enclose reference mbr # and associated mbr is not contained within reference", "== 0 for x in self.getEntries()]) return is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node =", "is implicit (determined using points sampled) and assumed to be consistent # we", "math.log(14500, 2)) ** (1 / 3.0) / denominator)) # for n = 14500", "raise Exception() if node.getNumChildren() <= 1: # raise Exception() node.setToSuperNode(False) elif node.getNumChildren() <=", "mbr_b, without_borders = False): upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft()", "getNodesHelper(self, node, partial_result): partial_result.append(node) for curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node):", "RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry)", "mbr): dimension = self.getDimension() does_enclose = True for i in xrange(dimension): left_value1 =", "explicitly for this case if reference_mbr.doesEnclose(mbr) == False: continue # kick out close", "math.log(5500, 2)) ** (1 / 3.0) / denominator)) # for n = 5500", "and assumed to be consistent # we never split a super-node # updated", "> RTree.MAX_OVERLAP_RATIO: # raise Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4, dimension, do_fail", "n = 14500 # x1 = int(100 + random.randint(0, k) * 100) #", "# raise 
Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry) return result_entry_list #", "0) or (self.getNumChildren() != 0 and False not in [x.getChild().getNumEntries() == 0 for", "((297, 196, 750), (1085, 718, 1259)), \\ ((808, 926, 151), (889, 1755, 320)),", "upper_left = self.getUpperLeft() lower_right = self.getLowerRight() sides = [] for i in xrange(self.getDimension()):", "[] for entry in entries: child = entry.getChild() child_str = self.toDepthStringHelper(child, depth +", "curr_entry.getMBR() mbr = entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr) # this parent-setting step", "node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result next_result =", "have negative area for i in xrange(mbr_a.getDimension()): # a \"left\" comp_a1 = min(upper_left_a[i],", "strange things going on - saturation occurs # if we increase n and", "10), (1, 10, 10), (3, 10, 10)] # for i in xrange(10): #", "\"\"\" def getUnionArea(self): pass \"\"\" # takes O(log(n)) time on average for start", "953, 196), (1776, 1662, 455)), \\ ((596, 892, 131), (1543, 1838, 669)), \\", "inter-group overlap means maximal disjointedness # is not going to be good enough", "return (False, None, None, None) dimension = None result1 = self.xtreeTopologicalSplit(node, entry) entry_collection1,", "10 # lower_right = (x, y, z) # lower_right = lower_rights[i] mbr =", "root = self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node, depth): if node ==", "if x[0] == min_area] candidate_entries = [x[1] for x in candidate_tagged_area_values] return candidate_entries", "entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def removeEntry(self, entry): curr_child = entry.getChild()", "next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr) 
node.getEntry().setMBR(next_mbr) # this parent-setting step is crucial # if", "[x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr", "2 * (y2 - y1) return margin surface_area = 0 for i in", "depth, curr_depth + 1) if result == False: return False return True def", "> comp_b1 else: do_overlap = do_overlap and comp_a1 <= comp_b2 and comp_a2 >=", "(100, 100, 0), 1)) tree = RTree() print tree.toString() curr_root = tree.getRootEntry().getChild() mbr1", "child = entry.getChild() child_str = self.toNumChildrenStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\"", "for x in tagged_area_values if x[0] == min_area] candidate_entries = [x[1] for x", "= partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild() for x in partner_entries] partner_mbr_list", "return partial_result def doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders): if entry.getMBR().isRaw() == True: if", "break return does_enclose def isEqualTo(self, mbr): upper_left1 = self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2", "for 55x growth; expected 317x slower) # n = 10000 # 84.222 seconds", "for x in tagged_area_values] min_area = min(area_values) candidate_tagged_area_values = [x for x in", "upper_left = self.getUpperLeft() lower_right = self.getLowerRight() result = str(list(upper_left + lower_right) + [self.isRaw()])", "saturation, domain has to grow with n # n = 100 # 0.427", "def peek(self): heap = self.heap pair = heap[0] result = pair return result", "def __init__(self, upper_left, lower_right, id_value): self.upper_left = upper_left self.lower_right = lower_right self.id_value =", "entries: child = entry.getChild() child_str = self.toNumChildrenStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str =", "if True: # if node.getNumChildren() == 0 and node == self.getRootEntry().getChild(): # if", 
"170.053 seconds (~398x slower for 145x growth; expected 1040x slower) # n =", "True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for", "in xrange(len(window_left_sizes))] upper_sorted_entries = entries[ : ] upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i]) upper_distributions", "entry.getMBR()) == True: result = self.findLeafHelper(entry, next_entry) if result == True: return result", "comp_2b - comp_2a term = 2 * term1 * term2 surface_area += term", "= [m - 1 + k for k in range(1, M - 2", "RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry)", "entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x) for x in mbr_list]", "((800, 709, 871), (1390, 1402, 1548)), \\ ((433, 499, 483), (1300, 1330, 1055))]", "lower-right's in (ul_i, ul_i + 10000) # two strange things going on -", "delete(self, entry): # print \"hello\" did_find_leaf = self.findLeaf(entry) child_node = entry.getChild() # root", "entries # does intersection query def doOverlapQuery(self, mbr, without_borders = False): partial_result =", "for entry in entries: child = entry.getChild() child_str = self.toStringHelper(child) curr_str = child_str", "= upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2 = lower_right_b[i] side =", "start_rectangle_to_close_ancestor_entries_dict def draw(self): # im = Image.new(\"RGB\", (512, 512), \"white\") \"\"\" im =", "tree.splitNode(parent, partner_entry) l, ll, e, ee = split_result return tree.adjustTree(tree, l, [e, ee],", "if x[1] == min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return chosen_d_value @staticmethod", "0 SUPERNODE = 1 NO_SPLIT = 2 def xtreeInsertHelper(self, entry, node): split_status =", "# tree.delete(entry8) # tree.insert(entry1) \"\"\" 
tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8)", "points sampled) and assumed to be consistent # we never split a super-node", "[node.getEntry().getMBR().toString(), str(node)] else: overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()] #", "if node.getParent() == None: entry = tree.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild()", "= next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for y in", "len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child =", "= max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list = base_mbr.getMBRList()", "x[1]]), x) for x in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x", "mbr_list = [entry.getMBR()] for mbr in mbr_list: upper_left = mbr.getUpperLeft() lower_right = mbr.getLowerRight()", "[(x[0].getArea(), x[1]) for x in tagged_mbr_list] area_values = [x[0] for x in tagged_area_values]", "curr_mbr = curr_entry.getMBR() mbr = entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr) # this", "= int(x1 + random.random() * 100) # y2 = int(y1 + random.random() *", "= curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension = split_history_root_dimension self.is_supernode = is_supernode self.entry =", "# if node.getNumChildren() == 0 and node == self.getRootEntry().getChild(): # if node.getNumChildren() ==", "= min(upper_left_a[i], lower_right_a[i]) # a \"right\" comp_a2 = 
max(upper_left_a[i], lower_right_a[i]) # b \"left\"", "= mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap = True # assume that rectangles never", "found for a delete\") # if parent has zero entries after removing this", "= min([x[1] for x in d_S_pairs]) min_S_value_d_S_pair_candidates = [x for x in d_S_pairs", "curr_entry, partial_result) # prefix order def getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return", "501)), \\ ((803, 233, 521), (1314, 717, 1487)), \\ ((660, 268, 962), (1293,", "0) point4 = (60, 100, 0) point5 = (70, 100, 0) point6 =", "node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split,", "self.getDimension() == 1: x1 = upper_left[0] x2 = lower_right[0] margin = x2 -", "for x in mbr_list] points = upper_left_points + lower_right_points min_components = [] max_components", "mbr.getLowerRight() result = upper_left_matches == True and lower_right_matches == True return result class", "for j in xrange(len(window_left_sizes))] upper_sorted_entries = entries[ : ] upper_sorted_entries.sort(key = lambda x:", "1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries)", "tagged_area_values if x[0] == min_area] candidate_entries = [x[1] for x in candidate_tagged_area_values] return", "= random.randint(0, 10000) # upper_left = (x1, y1, z1) # lower_right = (x2,", "slower for 1x growth; expected 1x slower) # n = 1000 # 1.1649", "lower_right, None) node = RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) #", "8 self.M = 16 self.child_to_entry_dict = {} for curr_entry in entries: curr_child =", "else: return 
RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split,", "result = RTree.rstarGenDistributions(entries, M, m) S_comp_dict = {} for i in xrange(entries[0].getMBR().getDimension()): low_comp_distributions,", "self.getRootEntry()) def findLeafHelper(self, entry, curr_entry): \"\"\" if node.isLeafNode() == False: curr_mbr = entry.getMBR()", "1055))] \"\"\" # n = 10,000 works in 1 min. 54 sec. for", "elif i % 4 == 1: upper_left = (20, 20) lower_right = (40,", "= dim def getParent(self): return self.parent def getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node):", "raise Exception() # for entry in entries[0 : 15]: for entry in entries:", "(first_priority_component, second_priority_component) priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item = curr_entry # pair", "elif node.isUnderfull() == True: \"\"\" if node.isUnderfull() == True: # print \"underfull\" parent", "y1 = int(100 + random.randint(0, k) * 100) # z1 = int(100 +", "doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders): if entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders)", "items = [x[1] for x in pair_list] return items def getSize(self): return len(self.heap)", "the reason is that # otherwise the directed graph implied by the r-tree", "priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\", conflict_x_tree.toString() # for a", "- saturation occurs # if we increase n and do not increase domains", "tree.rstarSplitNode(parent, partner_entry) l, ll, e, ee = split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee],", "[x for x in 
combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value] next_next_candidates = [x[1] for", "(785, 378, 1963)), \\ ((803, 1054, 307), (1776, 1597, 501)), \\ ((803, 233,", "time, # where n is number of actual rectangles or leaves; # assumes", "for x in d_S_pairs if x[1] == min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value =", "= curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum / (1.0 * union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO:", "entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry], False, False) else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result =", "lambda x: x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for j", "False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry], False) else: split_result = self.rstarSplitNode(leaf_node,", "** 2) return distance class RTreeNode: def __init__(self, parent, entries, is_leaf, entry =", "offset) next_x2, next_y2 = (multiplier * x2 + offset, multiplier * y2 +", "in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions =", "if len(tree.getNodes()) != 0: # print \"removing entry with mbr:\", entry.getMBR().toString() # print", "100, 0), (110, 200, 100), None) print tree.doContainmentQuery(curr_mbr3) # raise Exception() print tree.doOverlapQuery(curr_mbr2)", "in self.getEntries()]) is_leaf_node = self.getNumChildren() == 0 return is_leaf_node def addEntry(self, entry): curr_child", "is not going to be good enough to cut down branches explored; #", 
"node.isNonTraditionalLeafNode() == False: # this is idempotent for added_node in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node)", "sec. for pypy with m = 2 and M = 4 # n", "10)] # for i in xrange(10): # for i in xrange(4): \"\"\" ul_lr_pairs", "return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node", "= [((797, 989, 602), (910, 1248, 1035)), \\ ((920, 974, 724), (1802, 1524,", "tree2.draw() print len(tree2.getNodes()) import time time1 = time.time() result = tree2.getAllRectangleCloseAncestors() time2 =", "y1 = point1 x2, y2 = point2 change_x = x2 - x1 change_y", "node if enclosing mbr exists in conflict x-tree continue if entry == ignore_entry:", "RTreeNode: def __init__(self, parent, entries, is_leaf, entry = None, split_history_root_dimension = None, is_supernode", "comp_a1 = min(upper_left_a[i], lower_right_a[i]) # a \"right\" comp_a2 = max(upper_left_a[i], lower_right_a[i]) # b", "in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(), x[1]) for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values", "CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea() area2 = mbr2.getArea() union_area =", "# shorten tree entries = root.getEntries() chosen_entry = entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child)", "worst; # assumes that rectangles are distinct # return a list of entries", "(0, 0) lower_right = (10, 10) elif i % 4 == 1: upper_left", "candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) !=", "getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr, mbr] upper_left_points = [x.getUpperLeft() for x in mbr_list]", 
"tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100, 0), (110, 200, 100), None) print tree.doContainmentQuery(curr_mbr3) #", "node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() == node: next_root_entry = RTreeEntry(next_mbr, None) next_root = RTreeNode(None, [entry1,", "occurs # if we increase n and do not increase domains and #", "x in mbr_list] enlargement_values = [x[0] for x in tagged_enlargement_values] min_enlargement_value = min(enlargement_values)", "m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis) return next_result def", "partner_mbr_list = [x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode()", "PythonMagick.Color(65535, 0, 0, 32767) elif color_choice == 1: color = PythonMagick.Color(0, 0, 65535,", "M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M,", "parent-setting step is crucial # if node.isNonTraditionalLeafNode() == False: # this is idempotent", "self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1,", "self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if node == None: return \"\" entries = node.getEntries()", "HyperRectangle: def __init__(self, upper_left, lower_right, id_value): self.upper_left = upper_left self.lower_right = lower_right self.id_value", "x = random.randint(0, 10000) y = random.randint(0, 10000) # upper_left = (x1, y1,", "= 1 # k = int(round(denominator / denominator)) # for n = 100", "x[1] == min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return chosen_d_value @staticmethod def", "is_supernode def 
getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim def getParent(self):", "= 47 y2 = 60 next_x1 = x1 * multiplier + offset next_y1", "== False: break return do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b) ==", "lower_right_b[i]) # b \"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i]) # print comp_a1, comp_a2, comp_b1,", "2.996 sec. for pypy with m = 2 and M = 4 #", "= self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\")", "branches explored; # to counter saturation, domain has to grow with n #", "= [] if len(entries) > (M + 1): raise Exception() window_left_sizes = [m", "the leaf mbr # is not contained by reference rectangle; # check explicitly", "queries # dimension is implicit (determined using points sampled) and assumed to be", "= None if color_choice == 0: color = PythonMagick.Color(65535, 0, 0, 32767) elif", "None \"\"\" if i % 4 == 0: upper_left = (0, 0) lower_right", "node.isSuperNode() == True: # print \"supernode encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) #", "+ offset, multiplier * y2 + offset) if depth != 0: pass color_choice", "# lower_right = (x2, y2, z2) upper_left = (x, y) lower_right = (x,", "== min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries,", "return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent()", "mbr, without_borders = False): partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), 
partial_result, without_borders) return partial_result", "2 and M = 4 # n = 1,000 works in 3.428 sec.", "== True: first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True:", "rstarPreadjustTree(self, leaf_node): node = leaf_node parent = node.getParent() if parent != None: curr_entries", "the case when we initially insert parent = node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None)", "growth; expected 1040x slower) # n = 20000 # 230.0411 seconds (~538x slower", "+ string.join(overall_str_list, \" \") + \")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict", "(False, []) \"\"\" # assume item is in tree # returns a node,", "= 20000 n = 1000 import math for i in xrange(n): upper_left =", "def toString(self): return str(self.getEntries()) class RTreeEntry: def __init__(self, mbr, child): self.mbr = mbr", "0 else [node.getEntry().getMBR().toString(), str(node)] for entry in entries: child = entry.getChild() child_str =", "2 + 1)] window_left_sizes = [x for x in window_left_sizes if x <=", "== True: result = self.findLeafHelper(entry, next_entry) if result == True: return result return", "item = internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\", conflict_x_tree.toString() # for a well-formed", "for x in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in upper_mbr_pairs]", "= 60 next_x1 = x1 * multiplier + offset next_y1 = y1 *", "def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getArea(self): upper_left = self.getUpperLeft()", "return result return False def delete(self, entry): # print \"hello\" did_find_leaf = self.findLeaf(entry)", "when to attempt an overlap-minimal split # updated on 2016-11-03 to re-structure and", "tight_overall_mbr = 
CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node,", "fix traditional/non-traditional isLeafNode() distinction # updated on 2016-08-25 to fix overlap logic for", "y in x[0]], [y.getMBR() for y in x[1]]) for x in upper_comp_distributions] upper_mbr_pairs", "in node.getParent().getChildren())) == False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list = []", "term1 * term2 surface_area += term margin = surface_area return margin def toString(self):", "S_comp_dict.items() min_S_value = min([x[1] for x in d_S_pairs]) min_S_value_d_S_pair_candidates = [x for x", "self.getRootEntry() else None if leaf_node == None: raise Exception(\"expected a node to be", "self.m = 8 self.M = 16 self.child_to_entry_dict = {} for curr_entry in entries:", "self.getEntries() tagged_mbr_list = [(x.getMBR(), x) for x in entries] tagged_overlapped_mbr_list = [x for", "= (0, 0) lower_right = (10, 10) mbr = RawMBR(upper_left, lower_right, None) node", "[entry], False, False) else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry) l, ll, e,", "node, depth, curr_depth): if node == None: return elif node.isLeafNode() == True: if", "Point.toPoint(mbr) x, y = location multiplier = 1 / (1.0 * 6.5) *", "non-point mbr to a point\") return mbr.getUpperLeft() def getVec(self): return self.vec def getComponent(self,", "= curr_entry.getChild().getEntries() for next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result =", "have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if (parent.getNumChildren() + 2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent)", "setChild(self, node): self.child = node @staticmethod def 
draw(tree, entries, image, depth): for entry", "node.addEntry(entry) # needed this curr_node.setParent(node) \"\"\" entries = node.getEntries() mbr_list = [x.getMBR() for", "node.isLeafNode() == False: curr_mbr = entry.getMBR() entries = self.getEntries() tagged_mbr_list = [(x.getMBR(), x)", "algorithm condenseTree(L) # if the root has only one child (and it is", "Exception() if do_fail == True or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or len(entry_collection4) < node.getMinimumNumEntriesPerNode():", "i in xrange(1000): upper_left = (0, 0) lower_right = (10, 10) mbr =", "ul_lr_pairs = [((797, 989, 602), (910, 1248, 1035)), \\ ((920, 974, 724), (1802,", "0) point5 = (70, 100, 0) point6 = (80, 100, 0) point7 =", "area @staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr, mbr] upper_left_points = [x.getUpperLeft() for", "return self.getNumEntries() < self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries())", "rectangles or leaves; # assumes that rectangles are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes =", "mbr_a.getDimension() sides = [] for i in xrange(dimension): comp_a1 = upper_left_a[i] comp_a2 =", "entry): return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry, curr_entry): \"\"\" if node.isLeafNode() == False:", "slower for 100x growth; expected 664x slower) # n = 14500 # 170.053", "entry.getChild() curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no split\" return (RTree.NO_SPLIT, [node])", "for curr_entry in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR()", "node.getEntry() curr_mbr = curr_entry.getMBR() mbr = entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr) 
node.getEntry().setMBR(next_mbr) #", "\\ ((262, 221, 872), (500, 279, 1521)), \\ ((332, 886, 493), (822, 1305,", "in candidate_tagged_area_values] return candidate_entries @staticmethod def rstarGenDistributions(entries, M, m): result_list = [] if", "node.getParent().retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for x", "# tree2.draw() print len(tree2.getNodes()) import time time1 = time.time() result = tree2.getAllRectangleCloseAncestors() time2", "growth; expected 317x slower) # n = 10000 # 84.222 seconds (~197x slower", "curr_entry in entries: base_mbr = curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values", "ImageDraw.Draw(im) root = self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\" # image =", "False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return (node1, node2, entry1, entry2) @staticmethod def", "point, TopicKNearest, k): def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque", "\\ ((920, 974, 724), (1802, 1524, 1378)), \\ ((911, 953, 196), (1776, 1662,", "stop at root instead of non-existent parent of root; # also, we implement", "221, 872), (500, 279, 1521)), \\ ((332, 886, 493), (822, 1305, 1149)), \\", "= 10 # z = 10 # lower_right = (x, y, z) #", "(1762, 1795, 1309)), \\ ((225, 359, 290), (579, 950, 700)), \\ ((297, 196,", "works in 3.428 sec. 
for pypy with m = 8 and M =", "lower_right = (x, y, z) # lower_right = lower_rights[i] mbr = RawMBR(upper_left, lower_right,", "False): self.parent = parent self.is_leaf = is_leaf self.m = 8 self.M = 16", "self.mbr = mbr self.child = child def getMBR(self): return self.mbr def setMBR(self, mbr):", "# (priority,item) = heapq.heappop(heap) entry = item node = entry.getChild() mbr = entry.getMBR()", "else 1 second_priority_component = (-1 if curr_mbr_is_contained == True else 1) * curr_mbr_area", "y) lower_right = (x, y) # upper_left = ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1]", "= point2 change_x = x2 - x1 change_y = y2 - y1 distance", "- comp1 sides.append(side) area = reduce(lambda x, y: x * y, sides) return", "result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw() ==", "+ radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children = [x.getChild() for x in entries]", "split_result = tree.splitNode(parent, partner_entry) l, ll, e, ee = split_result return tree.adjustTree(tree, l,", "curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum / (1.0 * union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False)", "0) point3 = (50, 100, 0) point4 = (60, 100, 0) point5 =", "= node.getEntry() curr_mbr = curr_entry.getMBR() mbr = entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr)", "# note that M of two works import sys # import PythonMagick import", "== 0: return [] reference_mbr = reference_entry.getMBR() root_entry = self.getRootEntry() root_node = root_entry.getChild()", "random.randint(0, 10000) # upper_left = (x1, y1, z1) # lower_right = (x2, y2,", "= RTreeNode(None, [entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root) 
node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1)", "= CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1: next_curr_node = curr_entry.getChild() if curr_entry != entry:", "+ 10000) # two strange things going on - saturation occurs # if", "far, we should add to conflict x-tree result_entry_list.append(entry) raw_mbr = mbr next_mbr =", "random.randint(0, k) * 100) # y1 = int(100 + random.randint(0, k) * 100)", "overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toString(self):", "+ random.randint(0, k) * 100) # z1 = int(100 + random.randint(0, k) *", "None and node in node.getParent().getChildren())) == False else \"+\" overall_str_list = [curr_leaf_status] else:", "if node.getNumChildren() == 0 and node == self.getRootEntry().getChild(): # if node.getNumChildren() == 0:", "/ 3.0) / denominator)) # for n = 20000 # k = int(round((14500", "candidate_entries[0] chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry): return self.xtreeInsert(entry) def", "= entry.getMBR() location = Point.toPoint(mbr) x, y = location multiplier = 1 /", "self.getLowerRight() contained_item = self.getContainedItem() mbr = RawMBR(upper_left, lower_right, contained_item) return mbr def doesMatch(self,", "= list(set(curr_node.getEntries() + [entry])) return self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self, node, E_overall, entry):", "# if parent has zero entries after removing this entry, this should be", "mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw()", "overall_str def toString(self): root = self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self, node): if node", "== True and node == self.getRootEntry().getChild(): 
node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node) mbr =", "== False and reference_mbr.doesEnclose(curr_mbr) == False: continue # item = curr_entry # internal_node_stack_deque.appendleft(item)", "3 * 0.8 # offset = (768 * 0.2) / 2 offset =", "curr_mbr_is_contained == True else 1 second_priority_component = (-1 if curr_mbr_is_contained == True else", "= candidate_entries[0] chosen_child = chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry,", "adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry], False) else: split_result = self.rstarSplitNode(leaf_node, entry) l, ll,", "lower_right_matches = self.getLowerRight() == mbr.getLowerRight() result = upper_left_matches == True and lower_right_matches ==", "in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for", "priority (prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0: return [] reference_mbr = reference_entry.getMBR() root_entry", "entry = tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry = parent.retrieveEntryForChild(node) children =", "result parent = curr_node.getParent() \"\"\" if parent != None and (node in parent.getChildren()):", "tree.delete(entry1) print tree.toString() # tree.delete(entry8) # tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5)", "min(area_values) candidate_tagged_area_values = [x for x in tagged_area_values if x[0] == min_area] candidate_entries", "= [x[1] for x in candidate_tagged_area_values] return candidate_entries @staticmethod def rstarGenDistributions(entries, M, m):", "def 
TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest, k): def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree", "draw(self): # im = Image.new(\"RGB\", (512, 512), \"white\") \"\"\" im = Image.new(\"RGB\", (768,", "(RTree.NO_SPLIT, [node]) \"\"\" follow = self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry, follow) split_status, added_nodes", "# takes O(n * log(n)) time at worst; # assumes that rectangles are", "lower_right[i] side = comp2 - comp1 sides.append(side) area = reduce(lambda x, y: x", "(1, 10, 10), (8, 10, 10), (6, 10, 10), (9, 10, 10), (6,", "seconds (~538x slower for 200x growth; expected 1528x slower) # n = 2000", "def toList(self): pair_list = self.heap items = [x[1] for x in pair_list] return", "xrange(component_mbr_list[0].getDimension()): components = [x[i] for x in points] min_comp_value = min(components) max_comp_value =", "None) print tree.doContainmentQuery(curr_mbr3) # raise Exception() print tree.doOverlapQuery(curr_mbr2) # raise Exception() print tree.toString()", "fix overlap logic for determining when to attempt an overlap-minimal split # updated", "is_supernode self.entry = entry def getEntry(self): return self.entry def setEntry(self, entry): self.entry =", "if self.getDimension() == 2: x1, y1 = upper_left x2, y2 = lower_right margin", "RawMBR(point8, (110, 200, 100), point8) node8 = RTreeNode(None, [], True) entry8 = RTreeEntry(mbr8,", "if is_root_node == True: have_node_str = True overall_str_list = None if is_root_node ==", "overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toEntriesArePresentString(self):", "prev_leaf_status = node.isLeafNode() prev_leaf_status = None curr_node = node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M", "not increase domains and # high inter-group overlap means maximal disjointedness # is", "int(random.randint(1, 100)) # y = 10 # 
z = 10 # lower_right =", "!= self.getRootEntry() else None if leaf_node == None: raise Exception(\"expected a node to", "[x.getMBR() for x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x)", "True is_root_node = node == self.getRootEntry().getChild() if is_root_node == True: have_node_str = True", "next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1)", "assume item is in tree # returns a node, which can be None", "None if have_node_str == True: curr_leaf_status = \"-\" if node.isLeafNode() == False else", "upper_left = (0, 0) lower_right = (10, 10) mbr = RawMBR(upper_left, lower_right, None)", "self.getNodes() if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries = [x.getEntry() for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict", "in partner_entries] partner_mbr_list = [x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr)", "# returns entries # does intersection query def doOverlapQuery(self, mbr, without_borders = False):", "node1 = RTreeNode(parent, entry_group1, prev_leaf_status) node2 = RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry in", "MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr) # this parent-setting step is crucial # if node.isNonTraditionalLeafNode() ==", "if have_resulting_second_entry_from_split == True: if (parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return", "e, ee = resulting_entries_from_split l = e.getChild() ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries() +", "return self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self, node, E_overall, entry): # 
prev_leaf_status = node.isLeafNode()", "child def getMBR(self): return self.mbr def setMBR(self, mbr): self.mbr = mbr def getChild(self):", "curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR()", "made it this far, we should add children to priority queue entries =", "483), (1300, 1330, 1055))] \"\"\" # n = 10,000 works in 1 min.", "= upper_left[i] comp_1b = lower_right[i] term1 = comp_1b - comp_1a for j in", "return self.mbr_list def isComposite(self): return True @staticmethod def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for", "for x in tree.getNodes() if x.isSuperNode() == True], tree.getRootEntry().getChild() # tree2.draw() print len(tree2.getNodes())", "mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0]", "== 2: upper_left = (60, 60) lower_right = (80, 80) elif i %", "= split_result return tree.adjustTree(tree, l, [e, ee], True, False) else: return (False, [])", "0 return is_traditional_leaf_node \"\"\" def isLeafNode(self): # is_leaf_node = (self.getParent() == None and", "[curr_leaf_status] else: overall_str_list = [] for entry in entries: child = entry.getChild() child_str", "= curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2", "in (100, 10100) and # lower-right's in (ul_i, ul_i + 10000) # two", "20) lower_right = (40, 40) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None,", "<= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() == True: \"\"\" if node.isUnderfull() == True: #", "100, 0), (50, 100, 0), point3) curr_mbr2b = RawMBR((50, 50, 0), (100, 100,", "entries: RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod def drawHelper(tree, entry, 
image, depth): node =", "set correctly for a leaf, # which is not the case when we", "def getUnionArea(self): pass \"\"\" # takes O(log(n)) time on average for start rectangle", "= entry def getEntry(self): return self.entry def setEntry(self, entry): self.entry = entry def", "= RTreeNode(None, [], True) entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2,", "- x1 return margin if self.getDimension() == 2: x1, y1 = upper_left x2,", "self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if node == None: return", "= node.getParent() if parent != None: curr_entries = node.getEntries() entry = node.getParent().retrieveEntryForChild(node) children", "i % 4 == 0: upper_left = (0, 0) lower_right = (10, 10)", "chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry): return", "of actual rectangles or leaves; # assumes that rectangles are distinct def getAllRectangleCloseAncestors(self):", "+ 2 + 1)] window_left_sizes = [x for x in window_left_sizes if x", "for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] =", "upper_left_points = [x.getUpperLeft() for x in mbr_list] lower_right_points = [x.getLowerRight() for x in", "CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea() area2 =", "= None candidate_distributions = result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in", "matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry in matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry) # if", 
"chosen_child = chosen_entry.getChild() return chosen_entry def xtreeInsert(self, entry): # print \"insert\" return self.xtreeInsertHelper(entry,", "= time.time() result = tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff = time2 - time1", "in self.getNodes() if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries = [x.getEntry() for x in start_rectangle_nodes]", "= y1 * multiplier + offset next_x2 = x2 * multiplier + offset", "(60, 100, 0) point5 = (70, 100, 0) point6 = (80, 100, 0)", "return do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b) == False: return 0", "self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node): if", "if len(entries) > (M + 1): raise Exception() window_left_sizes = [m - 1", "found # remove E from L # call algorithm condenseTree(L) # if the", "for an r-tree and O(n * log(n)) time at worst; # and to", "curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension = split_history_root_dimension self.is_supernode = is_supernode self.entry", "result def toList(self): pair_list = self.heap items = [x[1] for x in pair_list]", "for x in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in low_mbr_pairs]", "x in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs] low_margin_values =", "self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1) if result == False: return False return True", "tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print tree.toString() tree2 =", "= min(components) max_comp_value 
= max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components)", "x in window_size_pairs if x[0] <= M and x[0] >= m and x[1]", "== True: curr_leaf_status = str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else: overall_str_list = [] for", "conflict_x_tree.toString() # for a well-formed r-tree, this takes O(n * log(n)) time, #", "RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5, (110, 200, 100), point5) node5 =", "point\") return mbr.getUpperLeft() def getVec(self): return self.vec def getComponent(self, d): return self.getVec()[d] def", "node in node.getParent().getChildren())) == False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list =", "MBR.__init__(self, upper_left, lower_right) self.contained_item = contained_item def isRaw(self): return True @staticmethod def makeMBRFromPoint(point):", "def doOverlap(mbr_a, mbr_b, without_borders = False): upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b", "and is_first_call_after_first_pass != True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild()", "= MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea() area2 = mbr2.getArea() union_area = area1 +", "1000 # n = 20000 n = 1000 import math for i in", "def removeEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self):", "partial_result): partial_result.append(node) for curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node): node_list =", "entries = node.getEntries() children = node.getChildren() have_node_str = True is_root_node = node ==", "if self.getDimension() == 1: x1 = upper_left[0] x2 = 
lower_right[0] margin = x2", "entry.getMBR() entries = self.getEntries() tagged_mbr_list = [(x.getMBR(), x) for x in entries] tagged_overlapped_mbr_list", "entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries =", "== False: node.setToSuperNode(True) # questionable if this is really necessary for entry in", "* math.log(1000, 2)) ** (1 / 3.0) / denominator)) # for n =", "True and node == self.getRootEntry().getChild(): node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()])", "to counter saturation, domain has to grow with n # n = 100", "candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) chosen_entry", "axis, M, m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis, False)", "= 0 while curr_node.isLeafNode() == False: curr_node = curr_node.getChildren()[0] depth = depth +", "= tree.getRectangleCloseDescendants(entry8) print result result = tree.getAllRectangleCloseAncestors() print result print len(result) for entry_to_close_ancestor_entry_list_pair", "def getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys()", "[] for entry in entries: child = entry.getChild() child_str = self.toLeafStatusStringHelper(child) curr_str =", "curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent = node.getParent() curr_entries", "for y in x[1]]) for x in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for", "== 
self.getRootEntry().getChild(): node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print", "2016-08-25 to fix overlap logic for determining when to attempt an overlap-minimal split", "if node is a leaf node, it has an actual rectangle # decide", "(1 / 3.0) / denominator)) # for n = 5500 # k =", "overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x for x in overlap_value_tagged_candidate_distributions if x[0]", "j in xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod def rstarChooseSplitAxis(entries,", "if this is really necessary for entry in entries: curr_node = entry.getChild() node.addEntry(entry)", "root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry = self.getRootEntry() curr_entries =", "like leaf nodes # keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw()", "((596, 892, 131), (1543, 1838, 669)), \\ ((879, 319, 789), (1877, 744, 791)),", "points] min_comp_value = min(components) max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point", "curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix", "getContainedItem(self): return self.contained_item def getMBRList(self): return [self] def clone(self): upper_left = self.getUpperLeft() lower_right", "candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\"", "return (RTree.NO_SPLIT, [node]) if 
node.isLeafNode() == True: # split just in case #", "= tree.getAllRectangleCloseAncestors() print result print len(result) for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list =", "RTree.SUPERNODE: pass # print \"no split\" return (RTree.NO_SPLIT, [node]) def rstarInsert(self, entry): leaf_node", "True return result class CompositeMBR(MBR): def __init__(self, upper_left, lower_right, mbr_list): MBR.__init__(self, upper_left, lower_right)", "return self.getVec()[d] def getIDValue(self): return self.id_value import string class RTree: def __init__(self): root_node", "32767) elif color_choice == 2: color = PythonMagick.Color(0, 65535, 0, 32767) if upper_left", "(self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries()) class RTreeEntry: def __init__(self, mbr, child): self.mbr =", "= RTreeNode(parent, entry_group1, prev_leaf_status) node2 = RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry in entry_group1:", "def doContainmentQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: # print mbr.toString(), entry.getMBR().toString()", "split_result = tree.rstarSplitNode(parent, partner_entry) l, ll, e, ee = split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(),", "curr_node.getEntry() # print \"mbr:\", curr_entry.getMBR().toString() # print \"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node,", "tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: # raise Exception() # print \"decision point\"", "m = 2 and M = 4 # n = 1,000 works in", "= [(MBR.findOverlapArea(x, mbr), x) for x in mbr_list] enlargement_values = [x[0] for x", "a \"right\" comp_a2 = max(upper_left_a[i], lower_right_a[i]) # b \"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i])", "Exception() print tree.toString() # tree.delete(entry1) print tree.toString() # tree.delete(entry8) # tree.insert(entry1) 
\"\"\" tree.delete(entry1)", "slower for 200x growth; expected 1528x slower) # n = 2000 # n", "<= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry], False, False) else:", "ended_with_split2 == True: e, ee = resulting_entries_from_split l = e.getChild() ll = ee.getChild()", "[y.getMBR() for y in x[1]]) for x in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1]))", "curr_node.getChildren()[0] depth = depth + 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node,", "lower_right_a[i]) # a \"right\" comp_a2 = max(upper_left_a[i], lower_right_a[i]) # b \"left\" comp_b1 =", "None) next_root = RTreeNode(None, [entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root) else:", "== lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1 center_y = next_y1 radius = 4", "1 second_priority_component = (-1 if curr_mbr_is_contained == True else 1) * curr_mbr_area #", "k = int(round((20000 * math.log(20000, 2)) ** (1 / 3.0) / denominator)) #", "candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry,", "node.getNumChildren() == 0: # if node.isNonTraditionalLeafNode() == True: if node.isLeafNode() == True and", "0) point8 = (110, 100, 0) curr_mbr1 = RawMBR((100, 100, 0), (100, 100,", "(entry_group1, entry_group2, axis) return next_result def xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension() == None:", "leaf_node = self.rstarChooseLeaf(entry) adjust_result = None if leaf_node.isFull() == 
False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result", "872), (500, 279, 1521)), \\ ((332, 886, 493), (822, 1305, 1149)), \\ ((800,", "for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent", "entry_collection3, entry_collection4, dimension) else: return (True, entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self, node, entry):", "== None: return elif node.isLeafNode() == True: if depth != curr_depth: return False", "max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr = CompositeMBR(upper_left_point, lower_right_point,", "priority) item = root_entry pair = (priority,item) heapq.heappush(heap,pair) # print entry_pq # raise", "dimension, do_fail = result2 # raise Exception() if do_fail == True or len(entry_collection3)", "next_x2, next_y2)) if len(entries) == 0: parent = entry.getChild().getParent() mbr = entry.getMBR() location", "CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return mbr @staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr", "curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() == node: next_root_entry = RTreeEntry(next_mbr, None) next_root =", "(False, []) else: parent = node.getParent() curr_entries = node.getEntries() entry = None if", "x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent =", "in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = 
CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1: next_curr_node", "entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6, (110, 200, 100), point6)", "# print \"no split\" return (RTree.NO_SPLIT, [node]) if node.isLeafNode() == True: # split", "self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node ==", "mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions =", "down branches explored; # to counter saturation, domain has to grow with n", "second_priority_component) # priority = -1 * root_mbr_area # entry_pq = PriorityQueue() heap =", "Exception() for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString()", "is that # otherwise the directed graph implied by the r-tree # is", "def getDistance(point1, point2): x1, y1 = point1 x2, y2 = point2 change_x =", "curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr) == False: continue # item = curr_entry #", "sec. 
for pypy with m = 8 and M = 16 # these", "to be consistent # we never split a super-node # updated on 2016-08-23", "* multiplier + offset next_y1 = y1 * multiplier + offset next_x2 =", "entry): self.entry = entry def isSuperNode(self): return self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode =", "for y in x[0]], [y.getMBR() for y in x[1]]) for x in low_comp_distributions]", "\"\"\" if node.isUnderfull() == True: # print \"underfull\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) #", "node == self.getRootEntry().getChild(): node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) #", "entry, partial_result): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else: entries", "# root = self.getRootEntry().getChild() \"\"\" if root.getNumChildren() == 1: # shorten tree entries", "mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def", "self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def", "= mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap =", "non-existent parent of root; # also, we implement delete(); note that our tree", "700)), \\ ((297, 196, 750), (1085, 718, 1259)), \\ ((808, 926, 151), (889,", "x in overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x for x in overlap_value_tagged_candidate_distributions", "\\ ((294, 238, 1036), (785, 378, 1963)), \\ ((803, 1054, 307), (1776, 1597,", "100) # z1 = int(100 + random.randint(0, k) * 100) # x2 =", 
"x in pair_list] return items def getSize(self): return len(self.heap) import math def getDistance(point1,", "in tree.getNodes() if x.isSuperNode() == True], tree.getRootEntry().getChild() # tree2.draw() print len(tree2.getNodes()) import time", "low_distributions = [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] upper_sorted_entries", "okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root = self.getRootEntry().getChild() \"\"\" if root.getNumChildren() == 1: #", "\" \") + \")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict = {}", "mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]),", "next_root = RTreeNode(None, [entry1, entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return (node1,", "y2, z2) upper_left = (x, y) lower_right = (x, y) # upper_left =", "xrange(mbr_a.getDimension()): # a \"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i]) # a \"right\" comp_a2 =", "else: entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list =", "max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list = base_mbr.getMBRList() + [mbr] mbr", "SPLIT = 0 SUPERNODE = 1 NO_SPLIT = 2 def xtreeInsertHelper(self, entry, node):", "tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100, 0), (110, 200, 100),", "= False): self.parent = parent self.is_leaf = is_leaf self.m = 8 self.M =", "curr_entry in entries: curr_child = curr_entry.getChild() 
(self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension = split_history_root_dimension self.is_supernode", "= (1536 * 0.2) / 2 next_x1, next_y1 = (multiplier * x1 +", "lower_right = lower_rights[i] mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True)", "False def delete(self, entry): # print \"hello\" did_find_leaf = self.findLeaf(entry) child_node = entry.getChild()", "x in mbr_list]) for curr_mbr in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node", "= 1000 import math for i in xrange(n): upper_left = None lower_right =", "return \"\" entries = node.getEntries() children = node.getChildren() have_node_str = True overall_str_list =", "= (80, 80) elif i % 4 == 3: upper_left = (100, 100)", "for i in range(len(entries)): curr_entry = entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry", "= 4 # n = 1,000 works in 3.428 sec. for pypy with", "== True or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False, None,", "False: return 0 else: upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft()", "x[0] == min_combined_area_value] next_next_candidates = [x[1] for x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0]", "# print \"mbr:\", curr_entry.getMBR().toString() # print \"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node, Q):", "don't necessarily need PythonMagick # note that nodes always point to same entries", "in mbr_list] lower_right_points = [x.getLowerRight() for x in mbr_list] points = upper_left_points +", "238, 1036), (785, 378, 1963)), \\ ((803, 1054, 307), (1776, 1597, 501)), \\", "curr_node.isLeafNode() == False: curr_node = curr_node.getChildren()[0] depth = depth + 1 return 
self.hasConsistentNonTraditionalLeafDepthValuesHelper(root,", "= comp2 - comp1 sides.append(side) area = reduce(lambda x, y: x * y,", "x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR()", "[node]) \"\"\" follow = self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry, follow) split_status, added_nodes =", "True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split)", "= self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis, M,", "non-traditional leaf leaf_node = child_node.getParent() if entry != self.getRootEntry() else None if leaf_node", "E_overall, entry) def rstarSplitNodeHelper(self, node, E_overall, entry): # prev_leaf_status = node.isLeafNode() prev_leaf_status =", "CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no split\" return (RTree.NO_SPLIT, [node]) if node.isLeafNode() == True:", "needed this curr_node.setParent(node) \"\"\" entries = node.getEntries() mbr_list = [x.getMBR() for x in", "overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def", "== True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result == True: return True return", "overlap_values = [x[0] for x in overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x", "result print len(result) for entry_to_close_ancestor_entry_list_pair in result.items(): entry, 
close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start", "is not acyclic and we have cliques # note that we don't necessarily", "upper_left x2, y2 = lower_right multiplier = 1 / (1.0 * 6.5) *", "to speak of modifying mbr if we plan on keeping the node if", "return chosen_distribution_pair def chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node): if", "upper_left = (0, 0) lower_right = (10, 10) elif i % 4 ==", "if curr_entry.getMBR().doesEnclose(mbr) == True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result == True: return", "def rstarInsert(self, entry): leaf_node = self.rstarChooseLeaf(entry) adjust_result = None if leaf_node.isFull() == False:", "self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node, depth): if node == None: return", "toEntriesArePresentStringHelper(self, node): if node == None: return \"\" entries = node.getEntries() children =", "parent != None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node != self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2)", "= max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr = CompositeMBR(upper_left_point,", "(1543, 1838, 669)), \\ ((879, 319, 789), (1877, 744, 791)), \\ ((1081, 1056,", ": ] low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j]", "entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent() == None: raise Exception() \"\"\" # print tree.toString()", "otherwise the directed graph implied by the r-tree # is not acyclic and", "__init__(self, upper_left, lower_right, mbr_list): 
MBR.__init__(self, upper_left, lower_right) self.mbr_list = mbr_list def getMBRList(self): return", "- comp_2a term = 2 * term1 * term2 surface_area += term margin", "entry) was_successful, entry_collection1, entry_collection2, dimension = split_result if was_successful == True: mbr_collection1 =", "MBR.doOverlap(mbr_a, mbr_b) == False: return 0 else: upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight()", "pair return result def toList(self): pair_list = self.heap items = [x[1] for x", "[], True) entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5, (110, 200,", "(100, 100, 0), HyperRectangle((50, 50, 0), (100, 100, 0), 1)) tree = RTree()", "sides = [] for i in xrange(dimension): comp_a1 = upper_left_a[i] comp_a2 = lower_right_a[i]", "(889, 1755, 320)), \\ ((945, 260, 1091), (1932, 332, 1133)), \\ ((262, 221,", "root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry def setRootEntry(self, root_entry): self.root_entry = root_entry", "32767) if upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1 center_y = next_y1", "None and (node in parent.getChildren()): pass \"\"\" node1 = RTreeNode(parent, entry_group1, prev_leaf_status) node2", "include associated entry in result; # if we made it this far, we", "reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x: x[0], reverse = True)", "(768 * 0.2) / 2 offset = (1536 * 0.2) / 2 x1", "3 color = None if color_choice == 0: color = PythonMagick.Color(65535, 0, 0,", "start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in", "points = upper_left_points + lower_right_points min_components = [] 
max_components = [] for i", "[] for curr_entry in entries: # set priority correctly and add to priority", "40) elif i % 4 == 2: upper_left = (60, 60) lower_right =", "[x[0].getMarginValue() + x[1].getMarginValue() for x in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value += upper_margin_value_sum", "(node in parent.getChildren()): pass \"\"\" node1 = RTreeNode(parent, entry_group1, prev_leaf_status) node2 = RTreeNode(parent,", "= CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if (parent.getNumChildren() + 2) <=", "contained_item): MBR.__init__(self, upper_left, lower_right) self.contained_item = contained_item def isRaw(self): return True @staticmethod def", "= candidate_entries[0] chosen_child = chosen_entry.getChild() return chosen_entry def xtreeInsert(self, entry): # print \"insert\"", "100x growth; expected 664x slower) # n = 14500 # 170.053 seconds (~398x", "mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted to turn a non-point mbr to a point\")", "sides.append(side) intersection_volume = reduce(lambda x, y: x * y, sides) return intersection_volume def", "node.isSuperNode() == True: # raise Exception() return (False, None, None, None) dimension =", "for x in self.getEntries()]) return is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() ==", "== True else 1) * curr_mbr_area # min-pq # priority = (first_priority_component, second_priority_component)", "# high inter-group overlap means maximal disjointedness # is not going to be", "e, ee = split_result resulting_entries_from_split = [e, ee] next_root = RTreeNode(None, resulting_entries_from_split, False,", "node) node.setEntry(entry) entries.append(entry) for i in xrange(1000): upper_left = (0, 0) lower_right =", "def getRectangleCloseDescendants(self, reference_entry): # repeatedly 
pop nodes, prune using enclosure/containment # w.r.t. reference", "rstarSplitNode(self, node, entry): curr_node = node E_overall = list(set(curr_node.getEntries() + [entry])) return self.rstarSplitNodeHelper(node,", "0 if root_mbr_is_contained == True else 1 second_priority_component = (-1 if root_mbr_is_contained ==", "def toStringHelper(self, node): if node == None: return \"\" entries = node.getEntries() children", "self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") +", "== None or (node.getParent() != None and node in node.getParent().getChildren())) == False else", "# prefix order def getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def", "candidate_entries = None # if node.isLeafNode() == True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if", "= 1000 # 1.1649 seconds (~2.72x slower for 10x growth; expected 33x slower)", "center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if len(entries)", "\"\"\" elif node.getNumChildren() == 0: pass return (RTree.NO_SPLIT, [node]) \"\"\" follow = self.chooseSubtree(entry,", "for x in candidate_tagged_enlargement_values] return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict = {}", "def isSuperNode(self): return self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode = is_supernode def getSplitHistoryRootDimension(self): return", "in x[1]]) for x in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in", "= (first_priority_component, second_priority_component) if curr_mbr.isRaw() == True: priority = -1 * curr_mbr_area item", "= 0 SUPERNODE = 1 NO_SPLIT = 2 def 
xtreeInsertHelper(self, entry, node): split_status", "node.getParent() == None: entry = tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node) children = [x.getChild()", "200, 100), point1) node1 = RTreeNode(None, [], True) entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1)", "lower_right, None) node = RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry)", "k = 1 # k = int(round(denominator / denominator)) # for n =", "toList(self): pair_list = self.heap items = [x[1] for x in pair_list] return items", "from L # call algorithm condenseTree(L) # if the root has only one", "<= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in node.getEntries()] curr_x_tree = RTree() overlap_area_sum = sum([x.getArea()", "radius = 2 perimeter_x = next_x + offset perimeter_y = next_y + offset", "= (self.getParent() == None and self.getNumChildren() == 0) or (self.getNumChildren() != 0 and", "parent pointers for xtreeInsert(); # have supernode demotion when size decreases to or", "curr_mbr3 = RawMBR((50, 100, 0), (110, 200, 100), None) print tree.doContainmentQuery(curr_mbr3) # raise", "containment queries # dimension is implicit (determined using points sampled) and assumed to", "is_first_call_after_first_pass != True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild() for", "and M = 16 # n = 6,000 works in 56.672 sec. 
for", "False: curr_node = curr_node.getChildren()[0] depth = depth + 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0)", "== True return result class CompositeMBR(MBR): def __init__(self, upper_left, lower_right, mbr_list): MBR.__init__(self, upper_left,", "= self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension = result1 mbr_collection1 = [x.getMBR() for x", "candidate_entries @staticmethod def rstarGenDistributions(entries, M, m): result_list = [] if len(entries) > (M", "problem here tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100, 0), (110,", "mbr_collection2 = [x.getMBR() for x in entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2)", "this is really necessary for entry in entries: curr_node = entry.getChild() node.addEntry(entry) #", "((358, 815, 372), (761, 1089, 594)), \\ ((294, 238, 1036), (785, 378, 1963)),", "node) node.setEntry(entry) # entries.append(entry) \"\"\" # for entry in entries[0 : 4]: #", "delete(); note that our tree # has entry-aware nodes; made bug fix for", "def toString(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() result = str(list(upper_left + lower_right)", "True) ended_with_split2, resulting_entries_from_split = adjust_result if ended_with_split2 == True: e, ee = resulting_entries_from_split", "close-ancestor finding; the assumption is necessary # to make strong running time estimates;", "node.getNumChildren() == 0 and node == self.getRootEntry().getChild(): # if node.getNumChildren() == 0: #", "= (20, 20) lower_right = (40, 40) elif i % 4 == 2:", "overlap_ratio = overlap_area / (1.0 * union_area) # raise Exception() if overlap_ratio >", "self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE = 1 NO_SPLIT = 2 def xtreeInsertHelper(self, entry,", "= [x[1] for x in matching_combined_area_tagged_next_candidate_distributions] 
chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self,", "do_overlap = do_overlap and comp_a1 <= comp_b2 and comp_a2 >= comp_b1 if do_overlap", "tree2 = RTree() import random entries = [] # lower_rights = [(3, 10,", "0 for i in xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b = lower_right[i] term1 =", "return True def toNumChildrenString(self): root = self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if", "(100, 100) lower_right = (120, 120) \"\"\" denominator = (100 * math.log(100, 2))", "do_fail = result2 # raise Exception() if do_fail == True or len(entry_collection3) <", "[mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self, entries, entry): mbr =", "1028), (1762, 1795, 1309)), \\ ((225, 359, 290), (579, 950, 700)), \\ ((297,", "= self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal = upper_left1 == upper_left2", "# raise Exception() if node.getNumChildren() <= 1: # raise Exception() node.setToSuperNode(False) elif node.getNumChildren()", "mbr_list] lower_right_points = [x.getLowerRight() for x in mbr_list] points = upper_left_points + lower_right_points", "return (self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries() def setParent(self, node):", "((332, 886, 493), (822, 1305, 1149)), \\ ((800, 709, 871), (1390, 1402, 1548)),", "is number of actual rectangles # or leaves in r-tree; these times assume", "for i in range(len(window_left_sizes))] window_size_pairs = [x for x in window_size_pairs if x[0]", "entry.getMBR() if mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr) == False: # ignore node if", "self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE = 1 NO_SPLIT = 2 def 
xtreeInsertHelper(self,", "or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False, None, None, dimension)", "return (None, None, None, True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall", "# offset = (768 * 0.2) / 2 offset = (1536 * 0.2)", "x in self.getEntries()]) return is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() == 0", "(x, y) # upper_left = ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1] # x =", "0 while curr_node.isLeafNode() == False: curr_node = curr_node.getChildren()[0] depth = depth + 1", "in 1 min. 54 sec. for pypy with m = 2 and M", "None, None) dimension = None result1 = self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension =", "self.findLeaf(entry) child_node = entry.getChild() # root node never has a raw mbr #", "m] for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ : ] low_sorted_entries.sort(key = lambda", "lower_right = (80, 80) elif i % 4 == 3: upper_left = (100,", "candidate_entries = [x[1] for x in candidate_tagged_area_values] return candidate_entries @staticmethod def rstarGenDistributions(entries, M,", "node.getParent() == None: entry = tree.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for", "for i in xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b = lower_right[i] term1 = comp_1b", "node.isLeafNode() == True and node == self.getRootEntry().getChild(): node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node) mbr", "[entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) 
parent.addEntry(entry2) #", "for matching_entry in matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry) # if node is a", "self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e, ee = split_result resulting_entries_from_split = [e, ee] next_root", "5500 # 23.899 seconds (~55.96x slower for 55x growth; expected 317x slower) #", "decreases to or below M # updated on 2016-11-06 to add single-start-rectangle-based #", "follow) split_status, added_nodes = result curr_entry = node.getEntry() curr_mbr = curr_entry.getMBR() mbr =", "/ 3.0) k = 1 # k = int(round(denominator / denominator)) # for", "entry = None if node.getParent() == None: entry = tree.getRootEntry() else: entry =", "node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4, (110, 200, 100), point4) node4 = RTreeNode(None, [],", "contained by reference rectangle; # check explicitly for this case if reference_mbr.doesEnclose(mbr) ==", "(RTree.NO_SPLIT, [node]) if node.isLeafNode() == True: # split just in case # print", "((433, 499, 483), (1300, 1330, 1055))] \"\"\" # n = 10,000 works in", "RawMBR(point1, (110, 200, 100), point1) node1 = RTreeNode(None, [], True) entry1 = RTreeEntry(mbr1,", "y2 = point2 change_x = x2 - x1 change_y = y2 - y1", "= RTree() print tree.toString() curr_root = tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110, 200, 100),", "= None if have_node_str == True: curr_leaf_status = str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else:", "return (False, []) else: parent = node.getParent() curr_entries = node.getEntries() entry = None", "this should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root = self.getRootEntry().getChild() \"\"\" if root.getNumChildren()", "= chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if RN is a leaf node # search", "= \"-\" if node.isLeafNode() == False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list", "entry) l, ll, e, ee = split_result 
adjust_result = RTree.rstarAdjustTree(self, l, [e, ee],", "* union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() == True: \"\"\" if", "y2 = lower_right margin = 2 * (x2 - x1) + 2 *", "entry_group1] mbr_group2 = [x.getMBR() for x in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 =", "to add single-start-rectangle-based # close-descendant finding that takes O(log(n)) time on average #", "this case if reference_mbr.doesEnclose(mbr) == False: continue # kick out close descendant candidates", "= resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True and is_first_call_after_first_pass != True:", "len(heap) != 0: (priority,item) = heapq.heappop(heap) elif len(internal_node_stack_deque) != 0: item = internal_node_stack_deque.popleft()", "k) * 100) # x2 = int(x1 + random.random() * 100) # y2", "y1 = 0 x2 = 47 y2 = 60 next_x1 = x1 *", "m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = node.getSplitHistoryRootDimension() result", "prefix order def getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def getNodesHelper(self,", "do do occasionally # note that M of two works import sys #", "child (and it is not a leaf) # remove the root # set", "+ x[0][1].getArea(), x[1]) for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for x in", "print \"decision point\" \"\"\" if node.isSuperNode() == True: # print \"supernode encountered\" parent", "= x2 * multiplier + offset next_y2 = y2 * multiplier + offset", "entry.getChild() child_str = self.toDepthStringHelper(child, depth + 1) curr_str = child_str overall_str_list.append(curr_str) overall_str =", "union_area == 0: if mbr1.isEqualTo(mbr2) == True: overlap_ratio = 1 else: 
overlap_ratio =", "x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self, entry): return self.chooseLeafHelper(entry,", "a leaf) # remove the root # set as new root its only", "= curr_node.getEntry() # print \"mbr:\", curr_entry.getMBR().toString() # print \"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self,", "lower_right = ul_lr_pairs[i][1] # x = int(random.randint(1, 100)) # y = 10 #", "curr_node = node E_overall = list(set(curr_node.getEntries() + [entry])) return self.rstarSplitNodeHelper(node, E_overall, entry) def", "overlap_area / (1.0 * union_area) # raise Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO: #", "RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis)", "for curr_node in node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1) if result", "mbr2.getArea() union_area = area1 + area2 - overlap_area ovelap_ratio = None if union_area", "= RawMBR((100, 100, 0), (100, 100, 0), (100, 100, 0)) curr_mbr2 = RawMBR((50,", "return self.lower_right def getArea(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() sides = []", "one match if such a node exists # def delete(self, E, RN): def", "to same entries # unless we explicitly create new entries, # which we", "if node.isLeafNode() == True: # split just in case # print \"split\" return", "(1776, 1597, 501)), \\ ((803, 233, 521), (1314, 717, 1487)), \\ ((660, 268,", "% 4 == 3: upper_left = (100, 100) lower_right = (120, 120) \"\"\"", "* math.log(100, 2)) ** (1 / 3.0) k = 1 # k =", "# for i in xrange(4): \"\"\" ul_lr_pairs = [((797, 989, 602), (910, 1248,", "for x in overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value] next_next_candidates = [x[1] for x", "= x1 * multiplier + offset 
next_y1 = y1 * multiplier + offset", "node1.setParent(parent) node2.setParent(parent) else: next_root = RTreeNode(None, [entry1, entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root)", "in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions", "((225, 359, 290), (579, 950, 700)), \\ ((297, 196, 750), (1085, 718, 1259)),", "print \"tree, currently:\", tree.toString() # tree2.delete(entry) pass # print tree.toString() result = tree.getRectangleCloseDescendants(entry8)", "= [x for x in tagged_enlargement_values if x[0] == min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]]", "in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for", "xtreeSplitNode(self, node, entry): # we never split a super-node if node.isSuperNode() == True:", "= self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def draw(self): # im", "10, 10), (1, 10, 10), (3, 10, 10)] # for i in xrange(10):", "second_entry if have_resulting_second_entry_from_split == True and is_first_call_after_first_pass != True: partner_node = partner_entry.getChild() partner_entries", "+= low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for y in", "mbr next_mbr = raw_mbr.clone() next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node)", "rectangle # 
taken from set of actual rectangles for an r-tree; # takes", "# remove the root # set as new root its only child pass", "/ denominator)) # for n = 1000 # k = int(round((5500 * math.log(5500,", "mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) # returns entries def doEnclosureQuery(self, mbr):", "leaf) # remove the root # set as new root its only child", "next_curr_node.setParent(node1) for curr_entry in entry_group2: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry)", "use isLeafNode() for this, as internal nodes can temporarily look like leaf nodes", "[] for i in xrange(component_mbr_list[0].getDimension()): components = [x[i] for x in points] min_comp_value", "mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list = [x.getMBR() for x in entries] mbr = entry.getMBR()", "l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self, node, entry): #", "== None: raise Exception(\"expected a node to be found for a delete\") #", "node2 = RTreeNode(None, [], True) entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 =", "= self.xtreeSplitNode(node, entry) was_successful, entry_collection1, entry_collection2, dimension = split_result if was_successful == True:", "RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry = self.getRootEntry() curr_entries = entry.getChild().getEntries() children", "if do_overlap == False: break return do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a,", "True else 1) * root_mbr_area # min-pq priority = (first_priority_component, second_priority_component) # priority", "tree.doOverlapQuery(curr_mbr2) # raise Exception() print tree.toString() # tree.delete(entry1) print tree.toString() # tree.delete(entry8) #", "is_traditional_leaf_node = self.getNumEntries() == 0 
return is_traditional_leaf_node \"\"\" def isLeafNode(self): # is_leaf_node =", "entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2] # this line presumes that", "= is_supernode self.entry = entry def getEntry(self): return self.entry def setEntry(self, entry): self.entry", "matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def", "entries[ : ] low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[ : window_left_sizes[j]],", "x in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1:", "4 # n = 1,000 works in 2.996 sec. for pypy with m", "self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries() < self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def", "entry continue if node.isLeafNode() == True: # could have a safe path to", "depth = depth + 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth,", "744, 791)), \\ ((1081, 1056, 1020), (1708, 1075, 1542)), \\ ((358, 815, 372),", "if node.isLeafNode() == True: if node == self.getRootEntry().getChild(): return node else: return node.getParent()", "self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node ==", "if entry.getMBR().isRaw() == True: # print mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry)", "if curr_mbr.isRaw() == True: priority = -1 * curr_mbr_area item = curr_entry pair", "print mbr.toString(), 
entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for", "RawMBR(upper_left, lower_right, contained_item) return mbr def doesMatch(self, mbr): upper_left_matches = self.getUpperLeft() == mbr.getUpperLeft()", "is_root_node == True: have_node_str = True overall_str_list = None if is_root_node == False:", "tree.getRectangleCloseDescendants(entry8) print result result = tree.getAllRectangleCloseAncestors() print result print len(result) for entry_to_close_ancestor_entry_list_pair in", "E_overall = node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M,", "= [(3, 10, 10), (1, 10, 10), (8, 10, 10), (6, 10, 10),", "entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry)", "self.child def setChild(self, node): self.child = node @staticmethod def draw(tree, entries, image, depth):", "= x2 - x1 change_y = y2 - y1 distance = math.sqrt(change_x **", "# kick out close descendant candidates on occasion, # if containment query for", "# raise Exception() if do_fail == True or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or len(entry_collection4)", "= y2 * multiplier + offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) image.write(\"tree.png\")", "2 * term1 * term2 surface_area += term margin = surface_area return margin", "m) entry_group1, entry_group2 = result parent = curr_node.getParent() \"\"\" if parent != None", "reference_mbr = reference_entry.getMBR() root_entry = self.getRootEntry() root_node = root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual", "in self.getEntries()]) return is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() == 0 return", 
"union_area = curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum / (1.0 * union_area) if multi_overlap_ratio <=", "self.entry def setEntry(self, entry): self.entry = entry def isSuperNode(self): return self.is_supernode def setToSuperNode(self,", "True overall_str_list = None if have_node_str == True: curr_leaf_status = str(node.getNumChildren()) overall_str_list =", "to attempt an overlap-minimal split # updated on 2016-11-03 to re-structure and modify", "next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea()", "False) return next_result def xtreeSupernodeInsert(self, node, entries): if node.isSuperNode() == False: node.setToSuperNode(True) #", "entry_collection2: curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() == node: next_root_entry = RTreeEntry(next_mbr, None) next_root", "change_y = y2 - y1 distance = math.sqrt(change_x ** 2 + change_y **", "chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if RN is a leaf node #", "for entry in entries: child = entry.getChild() child_str = self.toLeafStatusStringHelper(child) curr_str = child_str", "we should add children to priority queue entries = node.getEntries() priority_tagged_internal_entries = []", "= reduce(lambda x, y: x * y, sides) return area @staticmethod def getEnlargedMBR(base_mbr,", "parent = node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent, entry_collection1, None, entry1)", "while len(internal_node_stack_deque) != 0 or len(heap) != 0: # entry = entry_pq.pop() item", "return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry) 
entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() ==", "% 4 == 0: upper_left = (0, 0) lower_right = (10, 10) elif", "be None if no match is found # finds one match if such", "def getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def getNodesHelper(self, node, partial_result):", "updated on 2016-11-03 to re-structure and modify adjustTree(); # stop at root instead", "tree entries = root.getEntries() chosen_entry = entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" #", "entry in entries: # if len(tree.getNodes()) != 0: # print \"removing entry with", "node == None: return elif node.isLeafNode() == True: if depth != curr_depth: return", "entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent, entry_collection2, None, entry2) entry2.setChild(node2) for curr_entry", "10), (8, 10, 10), (6, 10, 10), (9, 10, 10), (6, 10, 10),", "necessary # to make strong running time estimates; the reason is that #", "for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions]", "mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no split\" return (RTree.NO_SPLIT, [node]) if node.isLeafNode()", "if have_resulting_second_entry_from_split == True: first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split", "for x in entries] tagged_overlapped_mbr_list = [x for x in tagged_mbr_list if MBR.doOverlap(curr_mbr,", "= (0, 0) lower_right = (10, 10) elif i % 4 == 1:", "x in combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value] next_next_candidates = [x[1] for x in", "return str(self.getEntries()) class RTreeEntry: def __init__(self, mbr, child): self.mbr = mbr self.child 
=", "S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for y", "chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry): return self.xtreeInsert(entry) def chooseSubtree(self,", "def rstarChooseSplitIndex(entries, axis, M, m): result = RTree.rstarGenDistributions(entries, M, m) candidate_distributions = None", "= [(x[0][0].getArea() + x[0][1].getArea(), x[1]) for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for", "# updated on 2016-11-03 to re-structure and modify adjustTree(); # stop at root", "domains and # high inter-group overlap means maximal disjointedness # is not going", "containment query for conflict x-tree returns entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry in", "if MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry", "True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) # returns entries def doEnclosureQuery(self, mbr): partial_result =", "== 0 return is_leaf_node def addEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] = entry", "time estimates; the reason is that # otherwise the directed graph implied by", "x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] keep_nodes = [x for x in", "for j in xrange(i + 1, self.getDimension()): comp_2a = upper_left[j] comp_2b = lower_right[j]", "y1 = upper_left x2, y2 = lower_right multiplier = 1 / (1.0 *", "= (x, y) # upper_left = ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1] # x", "random.randint(0, 10000) y = random.randint(0, 10000) # upper_left = (x1, y1, z1) #", "entry_group2: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = 
RTreeEntry(curr_overall_mbr1,", "# also, we implement delete(); note that our tree # has entry-aware nodes;", "i in xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b = lower_right[i] term1 = comp_1b -", "RTree.rstarAdjustTree(self, l, [e, ee], True) ended_with_split2, resulting_entries_from_split = adjust_result if ended_with_split2 == True:", "assumes that rectangles are distinct # return a list of entries def getRectangleCloseDescendants(self,", "resulting_entries_from_split) else: parent = node.getParent() curr_entries = node.getEntries() entry = None \"\"\" if", "def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getIDValue(self): return self.id_value class", "node.getParent(), [e, ee], True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: return RTree.rstarAdjustTreeHelper(tree,", "RawMBR(point2, (110, 200, 100), point2) node2 = RTreeNode(None, [], True) entry2 = RTreeEntry(mbr2,", "n = 100 # k = int(round((1000 * math.log(1000, 2)) ** (1 /", "CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry", "expected 33x slower) # n = 5500 # 23.899 seconds (~55.96x slower for", "# print tree.toString() result = tree.getRectangleCloseDescendants(entry8) print result result = tree.getAllRectangleCloseAncestors() print result", "== 0: raise Exception() if self.getDimension() == 1: x1 = upper_left[0] x2 =", "= [x[i] for x in points] min_comp_value = min(components) max_comp_value = max(components) min_components.append(min_comp_value)", "= RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent, entry_collection2, None, entry2) entry2.setChild(node2) for curr_entry in", "== True: e, ee = resulting_entries_from_split l = e.getChild() ll = ee.getChild() if", "* 100) # y1 = int(100 + random.randint(0, k) * 100) # z1", 
"curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1: next_curr_node = curr_entry.getChild() if curr_entry !=", "def isUnderfull(self): return self.getNumEntries() < self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def toString(self):", "for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split,", "insert(self, entry): return self.xtreeInsert(entry) def chooseSubtree(self, entry, node): entries = node.getEntries() candidate_entries =", "[x for x in tagged_enlargement_values if x[0] == min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for", "in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x) for x in", "next_x1 = x1 * multiplier + offset next_y1 = y1 * multiplier +", "self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix order def getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list)", "# w.r.t. 
reference rectangle, add children to priority queue, # ignore if contained", "1020), (1708, 1075, 1542)), \\ ((358, 815, 372), (761, 1089, 594)), \\ ((294,", "add actual rectangles to conflict x-tree, # use as priority (prefer_contained, prefer_large_area_if_contained_else_small) if", "is a non-traditional leaf leaf_node = child_node.getParent() if entry != self.getRootEntry() else None", "y2 + offset) if depth != 0: pass color_choice = depth % 3", "== True: return result return False def delete(self, entry): # print \"hello\" did_find_leaf", "MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix order def getNodes(self): node_list", "found # finds one match if such a node exists # def delete(self,", "return curr_node return None \"\"\" # a little stilted since we don't need", "xrange(10): # for i in xrange(4): \"\"\" ul_lr_pairs = [((797, 989, 602), (910,", "# has entry-aware nodes; made bug fix for adjustTree(); # fixed bug with", "tree.toString() # tree2.delete(entry) pass # print tree.toString() result = tree.getRectangleCloseDescendants(entry8) print result result", "entries.append(entry) \"\"\" # for entry in entries[0 : 4]: # for entry in", "\\ ((297, 196, 750), (1085, 718, 1259)), \\ ((808, 926, 151), (889, 1755,", "y = 10 # z = 10 # lower_right = (x, y, z)", "n = 14500 # 170.053 seconds (~398x slower for 145x growth; expected 1040x", "if node.getNumChildren() <= 1: # raise Exception() node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list", "break return do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b) == False: return", "= RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2, (110, 200, 100), point2) node2", "x) <= M and (len(entries) - x) >= m] window_size_pairs = [(window_left_sizes[i], len(entries)", "= self.doEnclosureQueryWithEarlyStoppingHelper(mbr, 
self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw() == True:", "2000 # n = 1000 # n = 20000 n = 1000 import", "+ random.random() * 100) x = random.randint(0, 10000) y = random.randint(0, 10000) #", "k = int(round(denominator / denominator)) # for n = 100 # k =", "child_str = self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \"", "x[1]) for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x in", "(910, 1248, 1035)), \\ ((920, 974, 724), (1802, 1524, 1378)), \\ ((911, 953,", "for entry in entries[0 : 4]: # for entry in entries[0 : 15]:", "8 and M = 16 # these numbers are for upper-left's in (100,", "point3) node3 = RTreeNode(None, [], True) entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4", "node_list) return node_list \"\"\" def getUnionArea(self): pass \"\"\" # takes O(log(n)) time on", "doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: return True", "True else: return False else: entries = curr_entry.getChild().getEntries() for next_entry in entries: if", "ignore node if its entry matches the ignore entry continue if node.isLeafNode() ==", "check explicitly for this case if reference_mbr.doesEnclose(mbr) == False: continue # kick out", "= str(list(upper_left + lower_right) + [self.isRaw()]) return result def getDimension(self): return len(self.getUpperLeft()) def", "for x in component_mbr_list] points = upper_left_points + lower_right_points min_components = [] max_components", "entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if RN is a leaf node", "close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for 
close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def draw(self): #", "e.getChild() ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else:", "x1 * multiplier + offset next_y1 = y1 * multiplier + offset next_x2", "upper_distributions = [(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] curr_tuple", "in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for y", "def findLeafHelper(self, entry, curr_entry): \"\"\" if node.isLeafNode() == False: curr_mbr = entry.getMBR() entries", "for x in overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x for x in", "\\ ((798, 928, 1028), (1762, 1795, 1309)), \\ ((225, 359, 290), (579, 950,", "entries = curr_entry.getChild().getEntries() for next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result", "else: overlap_ratio = overlap_area / (1.0 * union_area) # raise Exception() if overlap_ratio", "in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for x", "in parent.getChildren()): pass \"\"\" node1 = RTreeNode(parent, entry_group1, prev_leaf_status) node2 = RTreeNode(parent, entry_group2,", "return # not tested # returns entries # does intersection query def doOverlapQuery(self,", "# print tree.toString() # for entry in entries[0 : 4]: # print \"supernodes:\",", "= CompositeMBR(upper_left_point, 
lower_right_point, component_mbr_list) return result_mbr class HyperRectangle: def __init__(self, upper_left, lower_right, id_value):", "x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))]", "node, depth): if node == None: return \"\" entries = node.getEntries() children =", "point3 = (50, 100, 0) point4 = (60, 100, 0) point5 = (70,", "lower_right_b[i] side = max(0, min(comp_a2, comp_b2) - max(comp_a1, comp_b1)) sides.append(side) intersection_volume = reduce(lambda", "the assumption is necessary # to make strong running time estimates; the reason", "60 next_x1 = x1 * multiplier + offset next_y1 = y1 * multiplier", "False: # print \"not underfull\" parent = node.getParent() curr_entries = node.getEntries() entry =", "reference_mbr, result_entry_list, reference_entry) return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest, k): def", "entry.getChild() entries = node.getEntries() mbr_list = [entry.getMBR()] for mbr in mbr_list: upper_left =", "increase n and do not increase domains and # high inter-group overlap means", "the ignore entry continue if node.isLeafNode() == True: # could have a safe", "partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if (parent.getNumChildren() + 2)", "pypy with m = 2 and M = 4 # n = 1,000", "ee = split_result adjust_result = RTree.rstarAdjustTree(self, l, [e, ee], True) ended_with_split2, resulting_entries_from_split =", "priority = -1 * root_mbr_area # entry_pq = PriorityQueue() heap = [] #", "= [x.getMBR() for x in entry_group1] mbr_group2 = [x.getMBR() for x in entry_group2]", "!= 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) != 1: 
candidate_entries = self.resolveEnlargementTie(candidate_entries,", "node7.setEntry(entry7) tree.insert(entry7) mbr8 = RawMBR(point8, (110, 200, 100), point8) node8 = RTreeNode(None, [],", "[x.getEntry() for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry]", "= True) for priority_tagged_internal_entry in priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry item = internal_entry", "node.getEntries() entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list =", "\"\"\" if root.getNumChildren() == 1: # shorten tree entries = root.getEntries() chosen_entry =", "def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim def getParent(self): return self.parent def getEntries(self): return", "RN that cover E.mbr # follow the corresponding subtrees unti lthe leaf L", "- x) >= m] window_size_pairs = [(window_left_sizes[i], len(entries) - window_left_sizes[i]) for i in", "x[0] == min_area] candidate_entries = [x[1] for x in candidate_tagged_area_values] return candidate_entries @staticmethod", "[curr_depth] else: overall_str_list = [] for entry in entries: child = entry.getChild() child_str", "(1 / 3.0) / denominator)) # for n = 14500 # x1 =", "= [x[0].getMarginValue() + x[1].getMarginValue() for x in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value +=", "curr_depth: return False else: return True else: for curr_node in node.getChildren(): result =", "denominator)) # for n = 100 # k = int(round((1000 * math.log(1000, 2))", "[entry.getMBR()] for mbr in mbr_list: upper_left = mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1, y1", "y1 * multiplier + offset next_x2 = x2 * multiplier + offset next_y2", "= [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, 
result_entry_list, reference_entry) return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point,", "takes O(log(n)) time on average # for start rectangle taken from set of", "continue # item = curr_entry # internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component) priority_tagged_internal_entry =", "M, m) S_comp_dict = {} for i in xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions = result[i]", "partial_result, without_borders) return partial_result def doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders): if entry.getMBR().isRaw() ==", "# ignore node if enclosing mbr exists in conflict x-tree continue if entry", "comp_b1 if do_overlap == False: break return do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b): if", "node2, entry1, entry2) @staticmethod def rstarPreadjustTree(self, leaf_node): node = leaf_node parent = node.getParent()", "next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print \"split #2\"", "= 2 def xtreeInsertHelper(self, entry, node): split_status = None next_mbr = None if", "[x.getMBR() for x in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node, entry):", "\"supernode encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception() if node.getNumChildren() <=", "getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys() def", "self.findLeafHelper(entry, next_entry) if result == True: return result return False def delete(self, entry):", "mbr, without_borders) 
== True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries:", "curr_mbr.doesEnclose(reference_mbr) == True or reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x:", "window_left_sizes = [m - 1 + k for k in range(1, M -", "is found # finds one match if such a node exists # def", "xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension() == None: return (None, None, None, True) else:", "curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry())", "= [curr_depth] else: overall_str_list = [] for entry in entries: child = entry.getChild()", "curr_entry in entry_group1: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for", "== True or reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x: x[0],", "[] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders) return partial_result def doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders):", "x: x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for j in", "for n = 20000 # k = int(round((14500 * math.log(14500, 2)) ** (1", "in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def doEnclosureQueryWithEarlyStopping(self, mbr): result", "\"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry):", "1: x1 = upper_left[0] x2 = lower_right[0] margin = x2 - x1 return", "def 
getIDValue(self): return self.id_value import string class RTree: def __init__(self): root_node = RTreeNode(None,", "entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list,", "xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions = result[i] S_comp_value = 0 low_constituent_mbr_list_pairs = [([y.getMBR() for y", "node == None: return (False, []) else: parent = node.getParent() curr_entries = node.getEntries()", "+ [mbr] mbr = CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return mbr @staticmethod def getAreaEnlargement(base_mbr, mbr):", "on keeping the node if node.isUnderfull() == False: # print \"not underfull\" parent", "entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if", "int(x1 + random.random() * 100) # y2 = int(y1 + random.random() * 100)", "= upper_left_b[i] comp_b2 = lower_right_b[i] side = max(0, min(comp_a2, comp_b2) - max(comp_a1, comp_b1))", "x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None if have_resulting_second_entry_from_split ==", "= (110, 100, 0) curr_mbr1 = RawMBR((100, 100, 0), (100, 100, 0), (100,", "return self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode = is_supernode def getSplitHistoryRootDimension(self): return self.split_history_root_dimension def", "x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q) return # not tested", "base_mbr.getMBRList() + [mbr] mbr = CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return mbr @staticmethod def getAreaEnlargement(base_mbr,", "[x[0] for x in overlap_value_tagged_candidate_distributions] min_overlap_value = 
min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x for x", "= None # if node.isLeafNode() == True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries)", "+ lower_right) + [self.isRaw()]) return result def getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self, mbr):", "0) lower_right = (10, 10) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None,", "chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry,", "max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list = base_mbr.getMBRList() +", "= vec self.id_value = id_value @staticmethod def toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight(): raise", "tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x) for x in mbr_list] enlargement_values = [x[0] for", "entries[0 : 4]: # for entry in entries[0 : 15]: for entry in", "= entry.getChild() # root node never has a raw mbr # leaf is", "entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x) for x in mbr_list] enlargement_values = [x[0]", "in conflict x-tree continue if entry == ignore_entry: # ignore node if its", "m = 8 and M = 16 # n = 6,000 works in", "all-start-rectangles close-ancestor finding, # which for a well-formed r-tree, takes O(n * log(n))", "100), point4) node4 = RTreeNode(None, [], True) entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4)", "children to priority queue entries = node.getEntries() priority_tagged_internal_entries = [] for curr_entry in", "x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: # raise Exception() 
#", "raise Exception() window_left_sizes = [m - 1 + k for k in range(1,", "point4 = (60, 100, 0) point5 = (70, 100, 0) point6 = (80,", "y1, z1) # lower_right = (x2, y2, z2) upper_left = (x, y) lower_right", "def __init__(self): self.heap = [] def push(self, item, priority): pair = (priority,item) heapq.heappush(self.heap,pair)", "node to be found for a delete\") # if parent has zero entries", "in tagged_enlargement_values if x[0] == min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values]", "= [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString(), str(node)] for entry in entries:", "= RawMBR(point4, (110, 200, 100), point4) node4 = RTreeNode(None, [], True) entry4 =", "comp_2a term = 2 * term1 * term2 surface_area += term margin =", "[self.isRaw()]) return result def getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension = self.getDimension()", "(node.getParent() == None or (node.getParent() != None and node in node.getParent().getChildren())) == False", "= entry.getMBR() tagged_mbr_list = [] for curr_entry in entries: base_mbr = curr_entry.getMBR() curr_mbr", "print \"supernodes:\", [x for x in tree.getNodes() if x.isSuperNode() == True], tree.getRootEntry().getChild() #", "len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension = self.getDimension() does_enclose = True for i in", "mbr_list: upper_left = mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1, y1 = upper_left x2, y2", "surface_area return margin def toString(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() result =", "entry, partial_result): if entry.getMBR().isRaw() == True: # print mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) ==", "node.getParent(), [entry], False) else: split_result = tree.rstarSplitNode(parent, partner_entry) l, ll, e, ee =", "[(x.getMBR(), x) for x in entries] tagged_overlapped_mbr_list = [x for x in 
tagged_mbr_list", "radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2))", "+ offset next_y1 = y1 * multiplier + offset next_x2 = x2 *", "without_borders) # returns entries def doEnclosureQuery(self, mbr): partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result)", "in xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b = lower_right[i] term1 = comp_1b - comp_1a", "lower_right_points = [x.getLowerRight() for x in mbr_list] points = upper_left_points + lower_right_points min_components", "= next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1,", "[] for start_rectangle_entry in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry)", "# lower_rights = [(3, 10, 10), (1, 10, 10), (8, 10, 10), (6,", "math for i in xrange(n): upper_left = None lower_right = None \"\"\" if", "self.toNumChildrenStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") +", "True else: entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True:", "self.getRootEntry().getChild(): node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no", "takes O(n * log(n)) time at worst; # assumes that rectangles are distinct", "None) root_entry = 
RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry def setRootEntry(self,", "elif node.getNumChildren() == 0: pass return (RTree.NO_SPLIT, [node]) \"\"\" follow = self.chooseSubtree(entry, node).getChild()", "RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent, entry_collection2, None, entry2) entry2.setChild(node2) for curr_entry in entry_collection1:", "result curr_entry = node.getEntry() curr_mbr = curr_entry.getMBR() mbr = entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr,", "traditional/non-traditional isLeafNode() distinction # updated on 2016-08-25 to fix overlap logic for determining", "mbr.getLowerRight() is_equal = upper_left1 == upper_left2 and lower_right1 == lower_right2 return is_equal class", "for curr_node in Q: curr_entry = curr_node.getEntry() # print \"mbr:\", curr_entry.getMBR().toString() # print", "+ offset next_x2 = x2 * multiplier + offset next_y2 = y2 *", "else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if len(entries) == 0: parent", "image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3 * 0.8 # offset", "leaf where the leaf mbr # is not contained by reference rectangle; #", "x * y, sides) return area @staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr,", "overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea() area2 = mbr2.getArea() union_area = area1", "in (ul_i, ul_i + 10000) # two strange things going on - saturation", "depth != 0: pass color_choice = depth % 3 color = None if", "= 14500 # x1 = int(100 + random.randint(0, k) * 100) # y1", "point1) node1 = RTreeNode(None, [], True) entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2", "comp_b1 = upper_left_b[i] comp_b2 = lower_right_b[i] side = 
max(0, min(comp_a2, comp_b2) - max(comp_a1,", "and x[1] <= M and x[1] >= m] for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries", "node): return (self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries()) class RTreeEntry: def __init__(self, mbr, child):", "[(x[0][0].getArea() + x[0][1].getArea(), x[1]) for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for x", "for i in xrange(self.getDimension()): comp1 = upper_left[i] comp2 = lower_right[i] side = comp2", "self.is_leaf = is_leaf self.m = 8 self.M = 16 self.child_to_entry_dict = {} for", "[x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: # raise", "node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print \"split #2\" return (RTree.SPLIT, [node1, node2])", "partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True:", "True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) for i in xrange(1000): upper_left =", "True else 1) * curr_mbr_area # min-pq # priority = (first_priority_component, second_priority_component) if", "queue entries = node.getEntries() priority_tagged_internal_entries = [] for curr_entry in entries: # set", "M, m) candidate_distributions = None candidate_distributions = result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR()", "term2 = comp_2b - comp_2a term = 2 * term1 * term2 surface_area", "lower_right margin = 2 * (x2 - x1) + 2 * (y2 -", "= RTreeNode(None, [], True) entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7,", "entry2) entry2.setChild(node2) for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2: 
curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension)", "offset = (1536 * 0.2) / 2 next_x1, next_y1 = (multiplier * x1", "for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] for keep_node in keep_nodes: Q.append(keep_node)", "else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry)", "next_root = RTreeNode(None, [entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry())", "in overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value] next_next_candidates = [x[1] for x in matching_overlap_value_tagged_candidate_distributions]", "x in tree.getNodes() if x.isSuperNode() == True], tree.getRootEntry().getChild() # tree2.draw() print len(tree2.getNodes()) import", "def getMBRList(self): return self.mbr_list def isComposite(self): return True @staticmethod def makeMBR(component_mbr_list): upper_left_points =", "look like leaf nodes # keep_nodes = [x for x in self.getNodesForNode(node) if", "len(result) for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString()", "node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status == RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result", "\"\"\" entries = node.getEntries() mbr_list = [x.getMBR() for x in entries] tight_overall_mbr =", "offset next_y2 = y2 * multiplier + offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2,", "962), (1293, 619, 1521)), \\ ((798, 928, 1028), (1762, 1795, 1309)), \\ ((225,", "S_comp_value d_S_pairs = 
S_comp_dict.items() min_S_value = min([x[1] for x in d_S_pairs]) min_S_value_d_S_pair_candidates =", "entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for x in", "node; just look at parent of entry child if curr_entry.getMBR().isRaw() == True: if", "getRootEntry(self): return self.root_entry def setRootEntry(self, root_entry): self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root =", "in xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions = result[i] S_comp_value = 0 low_constituent_mbr_list_pairs = [([y.getMBR() for", "== 1: x1 = upper_left[0] x2 = lower_right[0] margin = x2 - x1", "return self.m def getMaximumNumEntriesPerNode(self): return self.M def isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def", "None, split_history_root_dimension = None, is_supernode = False): self.parent = parent self.is_leaf = is_leaf", "1 NO_SPLIT = 2 def xtreeInsertHelper(self, entry, node): split_status = None next_mbr =", "= [x.getMBR() for x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr),", "for i in xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions = result[i] S_comp_value = 0 low_constituent_mbr_list_pairs =", "= self.toDepthStringHelper(child, depth + 1) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" +", "None or (node.getParent() != None and node in node.getParent().getChildren())) == False else \"+\"", "1056, 1020), (1708, 1075, 1542)), \\ ((358, 815, 372), (761, 1089, 594)), \\", "in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return", "x-tree result_entry_list.append(entry) raw_mbr = mbr 
next_mbr = raw_mbr.clone() next_node = RTreeNode(None, [], True)", "in result; # if we made it this far, we should add to", "= RTreeNode(None, [], True) entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 = RawMBR(point8,", "def getNodesForNode(self, node): node_list = [] self.getNodesHelper(node, node_list) return node_list \"\"\" def getUnionArea(self):", "= depth + 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth):", "[x.getUpperLeft() for x in mbr_list] lower_right_points = [x.getLowerRight() for x in mbr_list] points", "= self.getUpperLeft() lower_right = self.getLowerRight() result = str(list(upper_left + lower_right) + [self.isRaw()]) return", "0.8 offset = (1536 * 0.2) / 2 next_x1, next_y1 = (multiplier *", "CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q) return # not tested # returns entries # does", "def resolveEnlargementTie(self, entries, entry): mbr = entry.getMBR() tagged_mbr_list = [] for curr_entry in", "isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() == 0 return is_traditional_leaf_node \"\"\" def isLeafNode(self): # is_leaf_node", "do not increase domains and # high inter-group overlap means maximal disjointedness #", "= [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for", "self.getVec()[d] def getIDValue(self): return self.id_value import string class RTree: def __init__(self): root_node =", "[], True) entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 = RawMBR(point8, (110, 200,", "(20, 20) lower_right = (40, 40) mbr = RawMBR(upper_left, lower_right, None) node =", "1040x slower) # n = 20000 # 230.0411 seconds (~538x slower for 200x", "max(comp_a1, comp_b1)) 
sides.append(side) intersection_volume = reduce(lambda x, y: x * y, sides) return", "# lower_right = ul_lr_pairs[i][1] # x = int(random.randint(1, 100)) # y = 10", "curr_mbr.isRaw() == True: priority = -1 * curr_mbr_area item = curr_entry pair =", "max_components = [] for i in xrange(component_mbr_list[0].getDimension()): components = [x[i] for x in", "not the case when we initially insert parent = node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1),", "return self.child def setChild(self, node): self.child = node @staticmethod def draw(tree, entries, image,", "lower_right = None \"\"\" if i % 4 == 0: upper_left = (0,", "entries[0 : 15]: for entry in entries: # if len(tree.getNodes()) != 0: #", "condenseTree(self, leaf_node): Q = [] self.condenseTreeHelper(leaf_node, Q) # Q is in order of", "and x[0] >= m and x[1] <= M and x[1] >= m] for", "entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result = self.findLeafHelper(entry, next_entry) if result ==", "[x.getUpperLeft() for x in component_mbr_list] lower_right_points = [x.getLowerRight() for x in component_mbr_list] points", "True: # print \"underfull\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode() for", "= mbr.getLowerRight() is_equal = upper_left1 == upper_left2 and lower_right1 == lower_right2 return is_equal", "else: entries = node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries", "y) # upper_left = ul_lr_pairs[i][0] # lower_right = ul_lr_pairs[i][1] # x = int(random.randint(1,", "# note that we assume rectangles are unique for close-descendant # and close-ancestor", "# repeatedly pop nodes, prune using enclosure/containment # w.r.t. 
reference rectangle, add children", "mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(), x[1]) for x in tagged_mbr_list] area_values =", "= 1 / (1.0 * 6.5) * 0.8 offset = (1536 * 0.2)", "False: if curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr) == False: continue # item =", "mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr) == False: # ignore", "makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for x in component_mbr_list] lower_right_points = [x.getLowerRight() for x", "False and reference_mbr.doesEnclose(mbr) == False: # ignore node if associated mbr does not", "in range(len(window_left_sizes))] window_size_pairs = [x for x in window_size_pairs if x[0] <= M", "# assumes that rectangles are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for x", "new root its only child pass def condenseTree(self, leaf_node): Q = [] self.condenseTreeHelper(leaf_node,", "import deque # min-pq class PriorityQueue: def __init__(self): self.heap = [] def push(self,", "class RTreeEntry: def __init__(self, mbr, child): self.mbr = mbr self.child = child def", "self.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in curr_entries] mbr_list =", "depth): node = entry.getChild() entries = node.getEntries() mbr_list = [entry.getMBR()] for mbr in", "have_resulting_second_entry_from_split == True: first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split ==", "elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() == 0: pass return", "result = tree.getAllRectangleCloseAncestors() print result print len(result) for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list", "= split_result return 
RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry],", "root_mbr_area = root_mbr.getArea() first_priority_component = 0 if root_mbr_is_contained == True else 1 second_priority_component", "RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) for i in xrange(1000): upper_left = (0, 0) lower_right", "if len(entries) == 0: parent = entry.getChild().getParent() mbr = entry.getMBR() location = Point.toPoint(mbr)", "= RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO = 0.2", "that M of two works import sys # import PythonMagick import heapq from", "/ denominator)) # for n = 100 # k = int(round((1000 * math.log(1000,", "[(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for x in", "upper_left = None lower_right = None \"\"\" if i % 4 == 0:", "candidate_entries[0] chosen_child = chosen_entry.getChild() return chosen_entry def xtreeInsert(self, entry): # print \"insert\" return", "(x, y) lower_right = (x, y) # upper_left = ul_lr_pairs[i][0] # lower_right =", "self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0]", "raise Exception() return (False, None, None, None) dimension = None result1 = self.xtreeTopologicalSplit(node,", "1838, 669)), \\ ((879, 319, 789), (1877, 744, 791)), \\ ((1081, 1056, 1020),", "= curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry in entry_group2: next_curr_node", "1) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") +", "candidate_entries = 
self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child)", "[], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False: #", "min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values = [x for x in tagged_enlargement_values if x[0] ==", "if containment query for conflict x-tree returns entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry", "# note that nodes always point to same entries # unless we explicitly", "> 1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR()", "# and associated mbr is not contained within reference mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr)", "slower for 10x growth; expected 33x slower) # n = 5500 # 23.899", "[mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict =", "x[1]) for x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(), x[1]) for x", "entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def removeEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self):", "for entry in entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent() == None: raise Exception() \"\"\"", "[(window_left_sizes[i], len(entries) - window_left_sizes[i]) for i in range(len(window_left_sizes))] window_size_pairs = [x for x", "sides = [] for i in xrange(self.getDimension()): comp1 = upper_left[i] comp2 = lower_right[i]", "return (False, None, None, dimension) else: return 
(True, entry_collection3, entry_collection4, dimension) else: return", "Exception() if self.getDimension() == 1: x1 = upper_left[0] x2 = lower_right[0] margin =", "node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6, (110, 200, 100), point6) node6 = RTreeNode(None, [],", "M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result next_result", "230.0411 seconds (~538x slower for 200x growth; expected 1528x slower) # n =", "10000 # k = int(round((20000 * math.log(20000, 2)) ** (1 / 3.0) /", "= RTree() internal_node_stack_deque = deque() # while len(heap) != 0: while len(internal_node_stack_deque) !=", "node5 = RTreeNode(None, [], True) entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 =", "mbr does not enclose reference mbr # and associated mbr is not contained", "= (priority,item) heapq.heappush(heap,pair) # print entry_pq # raise Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap,", "1 # k = int(round(denominator / denominator)) # for n = 100 #", "in x[0]], [y.getMBR() for y in x[1]]) for x in upper_comp_distributions] upper_mbr_pairs =", "# set as new root its only child pass def condenseTree(self, leaf_node): Q", "len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries() def setParent(self, node): self.parent = node def isNonTraditionalLeafNode(self):", "== True: if entry == curr_entry: return True else: return False else: entries", "growth; expected 1528x slower) # n = 2000 # n = 1000 #", "M, m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis) return next_result", "in entries] entry.draw(tree, entries, image, depth + 1) class MBR: def __init__(self, upper_left,", "reference rectangle, add children to priority queue, # ignore if contained rectangle is", "queue for leaf nodes # updated on 2016-11-16 to fix margin calculation #", "= RTreeEntry(mbr, node) node.setEntry(entry) 
entries.append(entry) for i in xrange(1000): upper_left = (0, 0)", "print \"split #2\" return (RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() for x in", "x in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) == True] for tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr,", "# n = 10000 # 84.222 seconds (~197x slower for 100x growth; expected", "time1 = time.time() result = tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff = time2 -", "len(heap) != 0: while len(internal_node_stack_deque) != 0 or len(heap) != 0: # entry", "area = reduce(lambda x, y: x * y, sides) return area @staticmethod def", "279, 1521)), \\ ((332, 886, 493), (822, 1305, 1149)), \\ ((800, 709, 871),", "\"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() if __name__", "# best-first priority queue for leaf nodes # updated on 2016-11-16 to fix", "= RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry) \"\"\" # for entry in entries[0 :", "min_components = [] max_components = [] for i in xrange(component_mbr_list[0].getDimension()): components = [x[i]", "+ 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if node", "def __init__(self, upper_left, lower_right, mbr_list): MBR.__init__(self, upper_left, lower_right) self.mbr_list = mbr_list def getMBRList(self):", "[e, ee], True) ended_with_split2, resulting_entries_from_split = adjust_result if ended_with_split2 == True: e, ee", "if node == self.getRootEntry().getChild(): return node else: return node.getParent() else: entries = node.getEntries()", ">= m] for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ : ] low_sorted_entries.sort(key =", "return None \"\"\" # a little stilted since we don't need a O(log(n))", 
"perimeter_x = next_x1 perimeter_y = next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else:", "entries, image, depth): for entry in entries: RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod def", "= (100, 100) lower_right = (120, 120) \"\"\" denominator = (100 * math.log(100,", "Exception(\"attempted to turn a non-point mbr to a point\") return mbr.getUpperLeft() def getVec(self):", "point2 change_x = x2 - x1 change_y = y2 - y1 distance =", "conflict_x_tree = RTree() internal_node_stack_deque = deque() # while len(heap) != 0: while len(internal_node_stack_deque)", "x in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs", "RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent, entry_collection2,", "0), (100, 100, 0), 1)) tree = RTree() print tree.toString() curr_root = tree.getRootEntry().getChild()", "974, 724), (1802, 1524, 1378)), \\ ((911, 953, 196), (1776, 1662, 455)), \\", "def isRaw(self): return False def isComposite(self): return False def getUpperLeft(self): return self.upper_left def", "point7 = (90, 100, 0) point8 = (110, 100, 0) curr_mbr1 = RawMBR((100,", "= [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1])", "if MBR.doOverlap(curr_mbr, x[0]) == True] for tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr", "= (x1, y1, z1) # lower_right = (x2, y2, z2) upper_left = (x,", "def rstarPreadjustTree(self, leaf_node): node = leaf_node parent = node.getParent() if parent != None:", "comp_1a for j in xrange(i + 1, self.getDimension()): comp_2a = upper_left[j] comp_2b =", 
"candidate_entries[0] chosen_child = chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild())", "comp_a2 = lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2 = lower_right_b[i] side = max(0, min(comp_a2,", "= 16 self.child_to_entry_dict = {} for curr_entry in entries: curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child]", "node.isSuperNode() == False: node.setToSuperNode(True) # questionable if this is really necessary for entry", "second_priority_component = (-1 if curr_mbr_is_contained == True else 1) * curr_mbr_area # min-pq", "curr_entry pair = (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr) == False", "node != self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root = RTreeNode(None, [entry1, entry2],", "(y2 - y1) return margin surface_area = 0 for i in xrange(self.getDimension()): comp_1a", "ovelap_ratio = None if union_area == 0: if mbr1.isEqualTo(mbr2) == True: overlap_ratio =", "* math.log(5500, 2)) ** (1 / 3.0) / denominator)) # for n =", "455)), \\ ((596, 892, 131), (1543, 1838, 669)), \\ ((879, 319, 789), (1877,", "# def delete(self, E, RN): def findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self,", "note that M of two works import sys # import PythonMagick import heapq", "= point1 x2, y2 = point2 change_x = x2 - x1 change_y =", "if node == None: return (False, []) else: parent = node.getParent() curr_entries =", "actual rectangles to conflict x-tree, # use as priority (prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren()", "upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[ : 
window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ])", "parent.addEntry(entry1) parent.addEntry(entry2) # print \"split #2\" return (RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry()", "next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node):", "# call algorithm condenseTree(L) # if the root has only one child (and", "and (node in parent.getChildren()): pass \"\"\" node1 = RTreeNode(parent, entry_group1, prev_leaf_status) node2 =", "that nodes always point to same entries # unless we explicitly create new", "color = None if color_choice == 0: color = PythonMagick.Color(65535, 0, 0, 32767)", "2 next_x = multiplier * x next_y = multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\")", "node if self.getRootEntry().getChild().getNumChildren() == 0: root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None,", "def getMBRList(self): return [self] def clone(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() contained_item", "entry = item node = entry.getChild() mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr) == False", "intersection_volume def getMarginValue(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() if self.getDimension() == 0:", "in xrange(4): \"\"\" ul_lr_pairs = [((797, 989, 602), (910, 1248, 1035)), \\ ((920,", "= mbr_b.getLowerRight() dimension = mbr_a.getDimension() sides = [] for i in xrange(dimension): comp_a1", "toDepthString(self): root = self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node, depth): if node", "* m + 2 + 1)] window_left_sizes = [x for x in window_left_sizes", "curr_entries] mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry", 
"RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3, (110, 200, 100), point3) node3 =", "CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent = node.getParent() curr_entries = node.getEntries() entry", "curr_entry.getChild().getEntries() for next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result = self.findLeafHelper(entry,", "assume that rectangles never have negative area for i in xrange(mbr_a.getDimension()): # a", "if union_area == 0: if mbr1.isEqualTo(mbr2) == True: overlap_ratio = 1 else: overlap_ratio", "= depth % 3 color = None if color_choice == 0: color =", "= tree.splitNode(parent, partner_entry) l, ll, e, ee = split_result return tree.adjustTree(tree, l, [e,", "True else: for curr_node in node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1)", "[entry], False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree, node,", "toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted to turn a non-point mbr to", "and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() == None: # we are a", "raise Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry) return result_entry_list # def", "== False: # ignore node if associated mbr does not enclose reference mbr", "time.time() result = tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff = time2 - time1 print", "= entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if RN is a leaf", "None) node = RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) 
node.setEntry(entry) entries.append(entry) for", "getLowerRight(self): return self.lower_right def getArea(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() sides =", "(x1, y1, z1) # lower_right = (x2, y2, z2) upper_left = (x, y)", "partial_result) return partial_result def doContainmentQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: #", "= math.sqrt(change_x ** 2 + change_y ** 2) return distance class RTreeNode: def", "(multiplier * x1 + offset, multiplier * y1 + offset) next_x2, next_y2 =", "x in window_left_sizes if x <= M and x >= m and (len(entries)", "\"\"\" tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print tree.toString() tree2", "parent.addEntry(entry2) # print \"split #2\" return (RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() for", "2016-08-21 # x-tree featuring enclosure and containment queries # dimension is implicit (determined", "mbr1.getArea() area2 = mbr2.getArea() union_area = area1 + area2 - overlap_area ovelap_ratio =", "next_y2)) if len(entries) == 0: parent = entry.getChild().getParent() mbr = entry.getMBR() location =", "True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum", "0 def peek(self): heap = self.heap pair = heap[0] result = pair return", "item = internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap) entry = item node = entry.getChild()", "# k = int(round(denominator / denominator)) # for n = 100 # k", "min_combined_area_value] next_next_candidates = [x[1] for x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair", "None, next_root_entry) next_root_entry.setChild(next_root) 
self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print \"split", "split a super-node if node.isSuperNode() == True: # raise Exception() return (False, None,", "(768, 768), \"white\") draw = ImageDraw.Draw(im) root = self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\",", "+ random.randint(0, k) * 100) # y1 = int(100 + random.randint(0, k) *", "(80, 80) elif i % 4 == 3: upper_left = (100, 100) lower_right", "for x in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs] upper_margin_values", "== True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() == None: # we", "node.getParent(), resulting_entries_from_split, have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node", "self.getLowerRight() sides = [] for i in xrange(self.getDimension()): comp1 = upper_left[i] comp2 =", "= curr_mbr.getArea() first_priority_component = 0 if curr_mbr_is_contained == True else 1 second_priority_component =", "* union_area) # raise Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise Exception() result2", "at parent of entry child if curr_entry.getMBR().isRaw() == True: if entry == curr_entry:", "curr_entry.getChild() result = self.findLeafHelper(entry, curr_node) if result == None: continue else: return curr_node", "if we plan on keeping the node if node.isUnderfull() == False: # print", "saturation occurs # if we increase n and do not increase domains and", "1755, 320)), \\ ((945, 260, 1091), (1932, 332, 1133)), \\ ((262, 221, 872),", "lower_right self.id_value = id_value def getUpperLeft(self): return self.upper_left def getLowerRight(self): 
return self.lower_right def", "- base_mbr_area return area_change @staticmethod def doOverlap(mbr_a, mbr_b, without_borders = False): upper_left_a =", "curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component = 0 if curr_mbr_is_contained == True", "# 2016-08-21 # x-tree featuring enclosure and containment queries # dimension is implicit", "or leaves in r-tree; these times assume \"maximal disjointedness\" # and depth-first stack", "and add to priority queue curr_node = curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual =", "def makeMBRFromPoint(point): upper_left = point lower_right = point result_mbr = RawMBR(upper_left, lower_right, point)", "entry) \"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries =", "10000) y = random.randint(0, 10000) # upper_left = (x1, y1, z1) # lower_right", "node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in node.getEntries()] curr_x_tree = RTree() overlap_area_sum = sum([x.getArea() for", "reference mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore node if enclosing mbr", "we are a root node if self.getRootEntry().getChild().getNumChildren() == 0: root_node = RTreeNode(None, [],", "def drawHelper(tree, entry, image, depth): node = entry.getChild() entries = node.getEntries() mbr_list =", "for close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def draw(self): # im = Image.new(\"RGB\",", "# x = int(random.randint(1, 100)) # y = 10 # z = 10", "lower_right): self.upper_left = upper_left self.lower_right = lower_right def isRaw(self): return False def isComposite(self):", "= RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node = RTreeNode(None, 
[], True) next_entry = RTreeEntry(next_mbr, next_node)", "in xrange(self.getDimension()): comp1 = upper_left[i] comp2 = lower_right[i] side = comp2 - comp1", "= upper_left[j] comp_2b = lower_right[j] term2 = comp_2b - comp_2a term = 2", "= node.getChildren() have_node_str = True overall_str_list = None if have_node_str == True: curr_leaf_status", "x.getEntry().getMBR().isRaw() == True] for keep_node in keep_nodes: Q.append(keep_node) # only makes sense to", "= node == self.getRootEntry().getChild() if is_root_node == True: have_node_str = True overall_str_list =", "initially insert parent = node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent, entry_collection1,", "min-pq priority = (first_priority_component, second_priority_component) # priority = -1 * root_mbr_area # entry_pq", "# which is not the case when we initially insert parent = node.getParent()", "perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) if len(entries) ==", "i in xrange(mbr_a.getDimension()): # a \"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i]) # a \"right\"", "989, 602), (910, 1248, 1035)), \\ ((920, 974, 724), (1802, 1524, 1378)), \\", "result result = tree.getAllRectangleCloseAncestors() print result print len(result) for entry_to_close_ancestor_entry_list_pair in result.items(): entry,", "CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in", "partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild() for x in partner_entries] partner_mbr_list =", "if color_choice == 0: color = PythonMagick.Color(65535, 0, 0, 32767) elif color_choice ==", ": 15]: for entry in entries: # if len(tree.getNodes()) != 0: # print", "x in entry_collection1] mbr_collection2 = 
[x.getMBR() for x in entry_collection2] # this line", "[x.getChild() for x in entries] entry.draw(tree, entries, image, depth + 1) class MBR:", "True] for tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr curr_node = curr_entry.getChild() result", "669)), \\ ((879, 319, 789), (1877, 744, 791)), \\ ((1081, 1056, 1020), (1708,", "6.5) * 0.8 offset = (1536 * 0.2) / 2 next_x1, next_y1 =", "pop(self): (priority,item) = heapq.heappop(self.heap) return item def isEmpty(self): return len(self.heap) == 0 def", "addEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def removeEntry(self, entry): curr_child =", "entry.getChild() # root node never has a raw mbr # leaf is a", "791)), \\ ((1081, 1056, 1020), (1708, 1075, 1542)), \\ ((358, 815, 372), (761,", "return self.toStringHelper(root) def toStringHelper(self, node): if node == None: return \"\" entries =", "= 1000 # n = 20000 n = 1000 import math for i", "is crucial # if node.isNonTraditionalLeafNode() == False: # this is idempotent for added_node", "!= 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if", "in x[1]]), x) for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for", "== True: if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry", "= [x[1] for x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions = next_next_candidates", "\\ ((911, 953, 196), (1776, 1662, 455)), \\ ((596, 892, 131), (1543, 1838,", "\"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i]) # a \"right\" comp_a2 = max(upper_left_a[i], lower_right_a[i]) 
#", "== min_area] candidate_entries = [x[1] for x in candidate_tagged_area_values] return candidate_entries @staticmethod def", "entry_pq.push(root_entry, priority) item = root_entry pair = (priority,item) heapq.heappush(heap,pair) # print entry_pq #", "\"\"\" def isLeafNode(self): # is_leaf_node = (self.getParent() == None and self.getNumChildren() == 0)", "[x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] for keep_node in keep_nodes:", "rstarInsert(self, entry): leaf_node = self.rstarChooseLeaf(entry) adjust_result = None if leaf_node.isFull() == False: leaf_node.addEntry(entry)", "= result2 # raise Exception() if do_fail == True or len(entry_collection3) < node.getMinimumNumEntriesPerNode()", "return self.lower_right def getIDValue(self): return self.id_value class Point: def __init__(self, vec, id_value): self.vec", "* 100) # z2 = int(z1 + random.random() * 100) x = random.randint(0,", "for curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for x in entry_group1] mbr_group2", "we never split a super-node # updated on 2016-08-23 to fix traditional/non-traditional isLeafNode()", "x in entry_collection2] # this line presumes that we have parent set correctly", "all entries of RN that cover E.mbr # follow the corresponding subtrees unti", "image, depth + 1) class MBR: def __init__(self, upper_left, lower_right): self.upper_left = upper_left", "None: curr_entries = node.getEntries() entry = node.getParent().retrieveEntryForChild(node) children = [x.getChild() for x in", "result return False def delete(self, entry): # print \"hello\" did_find_leaf = self.findLeaf(entry) child_node", "entry.setMBR(tight_overall_mbr) partner_entry = None if have_resulting_second_entry_from_split == True: first_entry, second_entry = resulting_entries_from_split partner_entry", "root_mbr_area # entry_pq = PriorityQueue() heap = [] # entry_pq.push(root_entry, priority) item =", "0.2) / 2 next_x1, next_y1 = (multiplier 
* x1 + offset, multiplier *", "priority queue, # ignore if contained rectangle is contained by a rectangle in", "mbr6 = RawMBR(point6, (110, 200, 100), point6) node6 = RTreeNode(None, [], True) entry6", "unti lthe leaf L that contains E is found # remove E from", "mbr): mbr_list = [base_mbr, mbr] upper_left_points = [x.getUpperLeft() for x in mbr_list] lower_right_points", "this takes O(n * log(n)) time, # where n is number of actual", "= min(upper_left_b[i], lower_right_b[i]) # b \"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i]) # print comp_a1,", "!= 0 else str(depth) overall_str_list = [curr_depth] else: overall_str_list = [] for entry", "comp_a1 = upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2 = lower_right_b[i] side", "returns a node, which can be None if no match is found #", "= multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x + offset center_y =", "if node.isLeafNode() == True: # could have a safe path to a leaf", "in entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent() == None: raise Exception() \"\"\" # print", "= None if len(heap) != 0: (priority,item) = heapq.heappop(heap) elif len(internal_node_stack_deque) != 0:", "draw(tree, entries, image, depth): for entry in entries: RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod", "color = PythonMagick.Color(65535, 0, 0, 32767) elif color_choice == 1: color = PythonMagick.Color(0,", "child_node.getParent() if entry != self.getRootEntry() else None if leaf_node == None: raise Exception(\"expected", "1) * curr_mbr_area # min-pq # priority = (first_priority_component, second_priority_component) if curr_mbr.isRaw() ==", "in order of low-level to high-level; # wish to insert using order of", "and comp_a2 > comp_b1 else: do_overlap = do_overlap and comp_a1 <= comp_b2 and", "[x[1] for x in candidate_tagged_area_values] return candidate_entries @staticmethod def rstarGenDistributions(entries, M, m): result_list", 
"axis, False) return next_result def xtreeSupernodeInsert(self, node, entries): if node.isSuperNode() == False: node.setToSuperNode(True)", "* x2 + offset, multiplier * y2 + offset) if depth != 0:", "toString(self): root = self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self, node): if node == None:", "mbr, entry): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: return True else:", "if mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted to turn a non-point mbr to a", "\") + \")\" return overall_str def toLeafStatusString(self): root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def", "for n = 1000 # k = int(round((5500 * math.log(5500, 2)) ** (1", "split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False)", "/ denominator)) # for n = 10000 # k = int(round((20000 * math.log(20000,", "# raise Exception() node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in node.getEntries()]", "to priority queue curr_node = curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained", "!= 0: while len(internal_node_stack_deque) != 0 or len(heap) != 0: # entry =", "# and close-ancestor finding; the assumption is necessary # to make strong running", "result next_result = (entry_group1, entry_group2, axis) return next_result def xtreeOverlapMinimalSplit(self, node, entry): if", "node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in node.getEntries()] curr_x_tree = RTree() overlap_area_sum =", "in window_size_pairs if x[0] <= M and x[0] >= m and x[1] <=", "such a node exists # def delete(self, E, RN): def findLeaf(self, entry): return", "if leaf_node.isFull() == False: leaf_node.addEntry(entry) 
entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry], False) else:", "def retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries()) class RTreeEntry: def __init__(self,", "= RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result parent = curr_node.getParent() \"\"\"", "1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: split_result =", "rstarGenDistributions(entries, M, m): result_list = [] if len(entries) > (M + 1): raise", "entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result,", "does_enclose def isEqualTo(self, mbr): upper_left1 = self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2 = mbr.getUpperLeft()", "= mbr.getLowerRight() x1, y1 = upper_left x2, y2 = lower_right multiplier = 1", "M, m): result_list = [] if len(entries) > (M + 1): raise Exception()", "entry.getChild() child_str = self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list,", "of RN to find E.mbr # else: # RN is an internal node", "1487)), \\ ((660, 268, 962), (1293, 619, 1521)), \\ ((798, 928, 1028), (1762,", "{} for i in range(len(entries)): curr_entry = entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] =", "a \"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i]) # a \"right\" comp_a2 = max(upper_left_a[i], lower_right_a[i])", "rectangle is contained by a rectangle in conflict x-tree, # add actual rectangles", "= ImageDraw.Draw(im) root = self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\" # image", "for x in tagged_mbr_list] area_values = 
[x[0] for x in tagged_area_values] min_area =", "is not a leaf) # remove the root # set as new root", "[base_mbr, mbr] upper_left_points = [x.getUpperLeft() for x in mbr_list] lower_right_points = [x.getLowerRight() for", "entry_collection2, dimension = split_result if was_successful == True: mbr_collection1 = [x.getMBR() for x", "for x in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for curr_entry in", "False not in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) return is_non_traditional_leaf_node \"\"\"", "for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) #", "RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum / (1.0 *", "RawMBR((100, 100, 0), (100, 100, 0), (100, 100, 0)) curr_mbr2 = RawMBR((50, 100,", "{} for i in xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions = result[i] S_comp_value = 0 low_constituent_mbr_list_pairs", "parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node != self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root =", "parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print \"split #2\" return (RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node,", "if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries = [x.getEntry() for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict =", "# print \"split #2\" return (RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() for x", "raise Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, 
entry_collection4, dimension, do_fail = result2 #", "root_mbr = CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else:", "node.getParent() == None: entry = tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry =", "= (x2, y2, z2) upper_left = (x, y) lower_right = (x, y) #", "mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix order def getNodes(self): node_list =", "start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry in", "rectangle, add children to priority queue, # ignore if contained rectangle is contained", "random.random() * 100) # z2 = int(z1 + random.random() * 100) x =", "True: \"\"\" if node.isUnderfull() == True: # print \"underfull\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node))", "None if color_choice == 0: color = PythonMagick.Color(65535, 0, 0, 32767) elif color_choice", "# for n = 20000 # k = int(round((14500 * math.log(14500, 2)) **", "[(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x", "0: upper_left = (0, 0) lower_right = (10, 10) elif i % 4", "= [] self.condenseTreeHelper(leaf_node, Q) # Q is in order of low-level to high-level;", "return self.mbr def setMBR(self, mbr): self.mbr = mbr def getChild(self): return self.child def", "getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for x in self.getNodes() if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries", "if x.getEntry().getMBR().isRaw() == True] keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw()", "[e, ee], True, False) else: return (False, []) \"\"\" # assume 
item is", "x, y: x * y, sides) return intersection_volume def getMarginValue(self): upper_left = self.getUpperLeft()", "None if True: # if node.getNumChildren() == 0 and node == self.getRootEntry().getChild(): #", "# print \"supernode #1\" return (RTree.SUPERNODE, [node]) elif split_status == RTree.SUPERNODE: pass #", "mbr1.isEqualTo(mbr2) == True: overlap_ratio = 1 else: overlap_ratio = 0 else: overlap_ratio =", "going to be good enough to cut down branches explored; # to counter", "True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr)", "# if we made it this far, we should add children to priority", "40) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry =", "to find E.mbr # else: # RN is an internal node # find", "None if have_resulting_second_entry_from_split == True: first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry if", "Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry) return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self,", "curr_entry self.split_history_root_dimension = split_history_root_dimension self.is_supernode = is_supernode self.entry = entry def getEntry(self): return", "M # updated on 2016-11-06 to add single-start-rectangle-based # close-descendant finding that takes", "for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2: curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if", "def __init__(self): root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None) root_entry", "x in candidate_tagged_area_values] return candidate_entries @staticmethod def rstarGenDistributions(entries, M, m): result_list = []", "x in 
component_mbr_list] lower_right_points = [x.getLowerRight() for x in component_mbr_list] points = upper_left_points", "x-tree, # add actual rectangles to conflict x-tree, # use as priority (prefer_contained,", "0 and False not in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) is_leaf_node", "curr_entries] mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(),", "setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim def getParent(self): return self.parent def getEntries(self): return (self.child_to_entry_dict).values()", "combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for x in combined_area_tagged_next_candidate_distributions if x[0]", "never split a super-node if node.isSuperNode() == True: # raise Exception() return (False,", "1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if node ==", "True) for priority_tagged_internal_entry in priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item)", "in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) # returns", "if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries()", "lower_right2 return is_equal class RawMBR(MBR): def __init__(self, upper_left, lower_right, contained_item): MBR.__init__(self, upper_left, lower_right)", "a list of entries def getRectangleCloseDescendants(self, reference_entry): # repeatedly pop nodes, prune using", "= is_leaf self.m = 
8 self.M = 16 self.child_to_entry_dict = {} for curr_entry", "in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for x in entry_group1] mbr_group2 = [x.getMBR()", "= (120, 120) \"\"\" denominator = (100 * math.log(100, 2)) ** (1 /", "lower_right_b[i]) # print comp_a1, comp_a2, comp_b1, comp_b2 # do_overlap = True if without_borders", "main(): point1 = (30, 100, 0) point2 = (40, 100, 0) point3 =", "dimension) def xtreeTopologicalSplit(self, node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall =", "E, RN): def findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry, curr_entry): \"\"\"", "x-tree featuring enclosure and containment queries # dimension is implicit (determined using points", "perimeter_y)) children = [x.getChild() for x in entries] entry.draw(tree, entries, image, depth +", "= node.getEntries() children = node.getChildren() have_node_str = True is_root_node = node == self.getRootEntry().getChild()", "split # updated on 2016-11-03 to re-structure and modify adjustTree(); # stop at", "(6, 10, 10), (9, 10, 10), (6, 10, 10), (9, 10, 10), (3,", "to turn a non-point mbr to a point\") return mbr.getUpperLeft() def getVec(self): return", "tested # returns entries # does intersection query def doOverlapQuery(self, mbr, without_borders =", "= self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1:", "# don't use isLeafNode() for this, as internal nodes can temporarily look like", "child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str", "for entry in entries: # if len(tree.getNodes()) != 0: # print \"removing entry", "= \"(\" + string.join(overall_str_list, \" \") + \")\" return 
overall_str def toLeafStatusString(self): root", "mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap = True", "priority): pair = (priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item) = heapq.heappop(self.heap) return item def", "65535, 0, 32767) if upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1 center_y", "(ul_i, ul_i + 10000) # two strange things going on - saturation occurs", "(1085, 718, 1259)), \\ ((808, 926, 151), (889, 1755, 320)), \\ ((945, 260,", "depth != curr_depth: return False else: return True else: for curr_node in node.getChildren():", "entry = node.getParent().retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR()", "we don't need a O(log(n)) time operation # to find the entry containing", "False: # ignore node if associated mbr does not enclose reference mbr #", "curr_mbr, curr_entry = tagged_overlapped_mbr curr_node = curr_entry.getChild() result = self.findLeafHelper(entry, curr_node) if result", "self.getNodesHelper(node, node_list) return node_list \"\"\" def getUnionArea(self): pass \"\"\" # takes O(log(n)) time", "super-node # updated on 2016-08-23 to fix traditional/non-traditional isLeafNode() distinction # updated on", "ll.setParent(self.getRootEntry().getChild()) else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e, ee = split_result resulting_entries_from_split", "PythonMagick.Color(0, 65535, 0, 32767) if upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1", "tagged_mbr_list = [(x.getMBR(), x) for x in entries] tagged_overlapped_mbr_list = [x for x", "True: return True return False # returns entries def doContainmentQuery(self, mbr): partial_result =", "def toDepthStringHelper(self, node, depth): if node == None: return \"\" entries = node.getEntries()", 
"Image.new(\"RGB\", (768, 768), \"white\") draw = ImageDraw.Draw(im) root = self.getRoot() root.draw(self, draw, 0)", "self.getContainedItem() mbr = RawMBR(upper_left, lower_right, contained_item) return mbr def doesMatch(self, mbr): upper_left_matches =", "random.random() * 100) # y2 = int(y1 + random.random() * 100) # z2", "child_str = self.toDepthStringHelper(child, depth + 1) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\"", "next_root_entry = RTreeEntry(next_mbr, None) next_root = RTreeNode(None, [entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry)", "# print \"not underfull\" parent = node.getParent() curr_entries = node.getEntries() entry = parent.retrieveEntryForChild(node)", "curr_entry.getMBR().doesEnclose(mbr) == True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result == True: return True", "926, 151), (889, 1755, 320)), \\ ((945, 260, 1091), (1932, 332, 1133)), \\", "10000) # two strange things going on - saturation occurs # if we", "n # n = 100 # 0.427 seconds (~1x slower for 1x growth;", "isRaw(self): return False def isComposite(self): return False def getUpperLeft(self): return self.upper_left def getLowerRight(self):", "if have_node_str == True: curr_leaf_status = \"-\" if node.isLeafNode() == False else \"+\"", "for entry in entries: child = entry.getChild() child_str = self.toNumChildrenStringHelper(child) curr_str = child_str", "y = random.randint(0, 10000) # upper_left = (x1, y1, z1) # lower_right =", "partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False: if have_resulting_second_entry_from_split == True: if (parent.getNumChildren() + 1)", "result_mbr def getContainedItem(self): return self.contained_item def getMBRList(self): return [self] def clone(self): upper_left =", "entry_group2 = result parent = curr_node.getParent() \"\"\" if parent != None and (node", "0 for x in self.getEntries()]) is_leaf_node 
= self.getNumChildren() == 0 return is_leaf_node def", "point1 x2, y2 = point2 change_x = x2 - x1 change_y = y2", "= sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR()", "partial_result def doContainmentQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: # print mbr.toString(),", "tagged_area_values = [(x[0].getArea(), x[1]) for x in tagged_mbr_list] area_values = [x[0] for x", "# if node is a leaf node, it has an actual rectangle #", "overall_str_list = [] for entry in entries: child = entry.getChild() child_str = self.toEntriesArePresentStringHelper(child)", "min_area = min(area_values) candidate_tagged_area_values = [x for x in tagged_area_values if x[0] ==", "for x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self, entry): return", "[x.getMBR() for x in entry_group1] mbr_group2 = [x.getMBR() for x in entry_group2] curr_overall_mbr1", "overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()] # overall_str_list = []", "node_list) return node_list def getNodesHelper(self, node, partial_result): partial_result.append(node) for curr_node in node.getChildren(): self.getNodesHelper(curr_node,", "tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print tree.toString() tree2 = RTree() import random entries =", "in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for", "# upper_left = (x1, y1, z1) # lower_right = (x2, y2, z2) upper_left", "= [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] 
combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(),", "[((797, 989, 602), (910, 1248, 1035)), \\ ((920, 974, 724), (1802, 1524, 1378)),", "explored; # to counter saturation, domain has to grow with n # n", "mbr): upper_left_matches = self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches = self.getLowerRight() == mbr.getLowerRight() result =", "== False: # if we made it this far, we should add children", "getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change", "= self.getRootEntry().getChild() curr_node = root depth = 0 while curr_node.isLeafNode() == False: curr_node", "int(z1 + random.random() * 100) x = random.randint(0, 10000) y = random.randint(0, 10000)", "= CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: # raise Exception() # print \"decision point\" \"\"\"", "# 1.1649 seconds (~2.72x slower for 10x growth; expected 33x slower) # n", "lower_right_b = mbr_b.getLowerRight() do_overlap = True # assume that rectangles never have negative", "(0, 0) lower_right = (10, 10) mbr = RawMBR(upper_left, lower_right, None) node =", "for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] keep_nodes = [x for x", "overlap_area_sum / (1.0 * union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() ==", "0 for x in self.getEntries()]) return is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries()", "entries: child = entry.getChild() child_str = self.toDepthStringHelper(child, depth + 1) curr_str = child_str", "= self.xtreeInsertHelper(entry, follow) split_status, added_nodes = result curr_entry = node.getEntry() curr_mbr = curr_entry.getMBR()", "nodes and # best-first priority queue for leaf nodes # updated on 2016-11-16", "self.entry = entry def 
getEntry(self): return self.entry def setEntry(self, entry): self.entry = entry", "in candidate_tagged_enlargement_values] return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i", "point2 = (40, 100, 0) point3 = (50, 100, 0) point4 = (60,", "enclosure and containment queries # dimension is implicit (determined using points sampled) and", "in mbr_list] points = upper_left_points + lower_right_points min_components = [] max_components = []", "[y.getMBR() for y in x[1]]), x) for x in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]),", "== True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr)", "= RTree.rstarAdjustTree(self, l, [e, ee], True) ended_with_split2, resulting_entries_from_split = adjust_result if ended_with_split2 ==", "repeatedly pop nodes, prune using enclosure/containment # w.r.t. 
reference rectangle, add children to", "1) class MBR: def __init__(self, upper_left, lower_right): self.upper_left = upper_left self.lower_right = lower_right", "entry) if len(candidate_entries) != 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) != 1:", "while len(heap) != 0: while len(internal_node_stack_deque) != 0 or len(heap) != 0: #", "point to same entries # unless we explicitly create new entries, # which", "== None: entry = tree.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x", "== True: mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR() for", "doContainmentQuery(self, mbr): partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doContainmentQueryHelper(self, mbr,", "# n = 20000 n = 1000 import math for i in xrange(n):", "= RawMBR((50, 100, 0), (50, 100, 0), point3) curr_mbr2b = RawMBR((50, 50, 0),", "overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list = [] if node.getNumChildren() == 0 else", "r-tree # is not acyclic and we have cliques # note that we", "[x[0] for x in tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values = [x for x", "Q = list(set(Q)) Q.reverse() for curr_node in Q: curr_entry = curr_node.getEntry() # print", "and lower_right1 == lower_right2 return is_equal class RawMBR(MBR): def __init__(self, upper_left, lower_right, contained_item):", "!= 0 or len(heap) != 0: # entry = entry_pq.pop() item = None", "lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2 = lower_right_b[i] side = max(0, min(comp_a2, comp_b2) -", "parent = entry.getChild().getParent() mbr = entry.getMBR() location = Point.toPoint(mbr) x, y = location", "= [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) 
entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q) return", "else: overlap_ratio = 0 else: overlap_ratio = overlap_area / (1.0 * union_area) #", "None) next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area", "return result def toList(self): pair_list = self.heap items = [x[1] for x in", "True overall_str_list = None if is_root_node == False: overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list", "resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True: partner_node = partner_entry.getChild() partner_entries =", "if upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1 center_y = next_y1 radius", "next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1,", "self.id_value = id_value @staticmethod def toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted to", "2016-11-06 to add single-start-rectangle-based # close-descendant finding that takes O(log(n)) time on average", "offset = (1536 * 0.2) / 2 next_x = multiplier * x next_y", "for n = 100 # k = int(round((1000 * math.log(1000, 2)) ** (1", "node.getChildren() have_node_str = True overall_str_list = None if have_node_str == True: curr_leaf_status =", "(110, 200, 100), point3) node3 = RTreeNode(None, [], True) entry3 = RTreeEntry(mbr3, node3)", "(prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0: return [] reference_mbr = reference_entry.getMBR() root_entry =", "here tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100, 0), (110, 200,", 
"lower_right = self.getLowerRight() if self.getDimension() == 0: raise Exception() if self.getDimension() == 1:", "def toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted to turn a non-point mbr", "= entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr,", "entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0] chosen_child", "< comp_b2 and comp_a2 > comp_b1 else: do_overlap = do_overlap and comp_a1 <=", "+ random.random() * 100) # z2 = int(z1 + random.random() * 100) x", "line presumes that we have parent set correctly for a leaf, # which", "in xrange(n): upper_left = None lower_right = None \"\"\" if i % 4", "True for i in xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1 =", "# print \"supernodes:\", [x for x in tree.getNodes() if x.isSuperNode() == True], tree.getRootEntry().getChild()", "worst; # and to add all-start-rectangles close-ancestor finding, # which for a well-formed", "= self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if node == None: return \"\"", "= (20, 20) lower_right = (40, 40) mbr = RawMBR(upper_left, lower_right, None) node", "# print comp_a1, comp_a2, comp_b1, comp_b2 # do_overlap = True if without_borders ==", "result_entry_list, reference_entry) return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest, k): def getRectangleCloseDescendantsHelper(self,", "note that nodes always point to same entries # unless we explicitly create", "j in xrange(i + 1, self.getDimension()): comp_2a = upper_left[j] comp_2b = lower_right[j] term2", "n = 1000 # k = int(round((5500 * math.log(5500, 2)) ** (1 /", "in xrange(dimension): comp_a1 = 
upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2 =", "at worst; # assumes that rectangles are distinct # return a list of", "= curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area =", "a raw mbr # leaf is a non-traditional leaf leaf_node = child_node.getParent() if", "split a super-node # updated on 2016-08-23 to fix traditional/non-traditional isLeafNode() distinction #", "0: while len(internal_node_stack_deque) != 0 or len(heap) != 0: # entry = entry_pq.pop()", "remove E from L # call algorithm condenseTree(L) # if the root has", "(110, 100, 0) curr_mbr1 = RawMBR((100, 100, 0), (100, 100, 0), (100, 100,", "case when we initially insert parent = node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1", "# find all entries of RN that cover E.mbr # follow the corresponding", "= RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5, (110, 200, 100), point5) node5", "= y2 - y1 distance = math.sqrt(change_x ** 2 + change_y ** 2)", "offset = (768 * 0.2) / 2 offset = (1536 * 0.2) /", "= [(x[0].getArea(), x[1]) for x in tagged_mbr_list] area_values = [x[0] for x in", "[x for x in tagged_area_values if x[0] == min_area] candidate_entries = [x[1] for", "chosen_child) def insert(self, entry): return self.xtreeInsert(entry) def chooseSubtree(self, entry, node): entries = node.getEntries()", "multiplier = 1 / (1.0 * 6.5) * 0.8 offset = (1536 *", "curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item = curr_entry # pair = (priority,item) # if curr_mbr.doesEnclose(reference_mbr)", "min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for x in combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value] next_next_candidates", "= tree.rstarSplitNode(parent, partner_entry) l, ll, e, 
ee = split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e,", "min_overlap_value] next_next_candidates = [x[1] for x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions", "have_resulting_second_entry_from_split) \"\"\" @staticmethod def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node == None:", "adjust_result = None if leaf_node.isFull() == False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node,", "None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry = self.getRootEntry() curr_entries", "16 self.child_to_entry_dict = {} for curr_entry in entries: curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child] =", "Q.reverse() for curr_node in Q: curr_entry = curr_node.getEntry() # print \"mbr:\", curr_entry.getMBR().toString() #", "contained within reference mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore node if", "x: x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for j in", "1035)), \\ ((920, 974, 724), (1802, 1524, 1378)), \\ ((911, 953, 196), (1776,", "and lower_right_matches == True return result class CompositeMBR(MBR): def __init__(self, upper_left, lower_right, mbr_list):", "result2 # raise Exception() if do_fail == True or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or", "that rectangles are distinct # return a list of entries def getRectangleCloseDescendants(self, reference_entry):", "if node.isLeafNode() == True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) != 1: 
candidate_entries", "next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode()", "entry = self.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in curr_entries]", "100, 0) point2 = (40, 100, 0) point3 = (50, 100, 0) point4", "x in overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value] next_next_candidates = [x[1] for x in", "# print \"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node, Q): # demote super-node if", "3.0) / denominator)) # for n = 14500 # x1 = int(100 +", "in entries: curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension = split_history_root_dimension self.is_supernode =", "curr_entry in entry_group2: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1", "low_margin_value_sum = sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]],", "] upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] :", "\\ ((803, 1054, 307), (1776, 1597, 501)), \\ ((803, 233, 521), (1314, 717,", "curr_entry in entries: # set priority correctly and add to priority queue curr_node", "unique for close-descendant # and close-ancestor finding; the assumption is necessary # to", "in priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict", "entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent = node.getParent() curr_entries = node.getEntries() 
entry =", "- x) <= M and (len(entries) - x) >= m] window_size_pairs = [(window_left_sizes[i],", "\\ ((808, 926, 151), (889, 1755, 320)), \\ ((945, 260, 1091), (1932, 332,", "result_entry_list.append(entry) raw_mbr = mbr next_mbr = raw_mbr.clone() next_node = RTreeNode(None, [], True) next_entry", "if x <= M and x >= m and (len(entries) - x) <=", "str(node)] else: overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()] # overall_str_list", "self.lower_right def getArea(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() sides = [] for", "m = 8 and M = 16 # these numbers are for upper-left's", "= RawMBR(point2, (110, 200, 100), point2) node2 = RTreeNode(None, [], True) entry2 =", "= id_value @staticmethod def toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted to turn", "[] for curr_entry in entries: base_mbr = curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr,", "of actual rectangles for an r-tree; # takes O(n * log(n)) time at", "= [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doContainmentQueryHelper(self, mbr, entry, partial_result): if", "entry2 = RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent != None: original_entry = parent.retrieveEntryForChild(curr_node)", "# tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print", "\"supernode #1\" return (RTree.SUPERNODE, [node]) elif split_status == RTree.SUPERNODE: pass # print \"no", "doOverlap(mbr_a, mbr_b, without_borders = False): upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b =", "x in candidate_tagged_enlargement_values] return candidate_entries def 
chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict = {} for", "curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2 = RTreeEntry(curr_overall_mbr2, node2)", "image.fillColor(\"black\") center_x = next_x + offset center_y = next_y + offset radius =", "node.getEntries() priority_tagged_internal_entries = [] for curr_entry in entries: # set priority correctly and", "= enlarged_mbr_area - base_mbr_area return area_change @staticmethod def doOverlap(mbr_a, mbr_b, without_borders = False):", "upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() + x[1].getMarginValue()", "node, it has an actual rectangle # decide whether to include associated entry", "# k = int(round((14500 * math.log(14500, 2)) ** (1 / 3.0) / denominator))", "317x slower) # n = 10000 # 84.222 seconds (~197x slower for 100x", "10x growth; expected 33x slower) # n = 5500 # 23.899 seconds (~55.96x", "root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry = self.getRootEntry() curr_entries = entry.getChild().getEntries() children =", "comp_a2, comp_b1, comp_b2 # do_overlap = True if without_borders == True: do_overlap =", "\"conflict x-tree:\", conflict_x_tree.toString() # for a well-formed r-tree, this takes O(n * log(n))", "* root_mbr_area # entry_pq = PriorityQueue() heap = [] # entry_pq.push(root_entry, priority) item", "# def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest, k): def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry):", "self.xtreeInsertHelper(entry, follow) split_status, added_nodes = result curr_entry = node.getEntry() curr_mbr = curr_entry.getMBR() mbr", "# item = curr_entry # pair = (priority,item) # if curr_mbr.doesEnclose(reference_mbr) == True", 
"denominator)) # for n = 10000 # k = int(round((20000 * math.log(20000, 2))", "= CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent = node.getParent() curr_entries = node.getEntries()", "= self.getNumChildren() == 0 return is_leaf_node def addEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child]", "doEnclosureQuery(self, mbr): partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doEnclosureQueryHelper(self, mbr,", "leaf_node.isFull() == False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry], False) else: split_result", "= [(window_left_sizes[i], len(entries) - window_left_sizes[i]) for i in range(len(window_left_sizes))] window_size_pairs = [x for", "for x in partner_entries] partner_mbr_list = [x.getMBR() for x in partner_entries] partner_tight_overall_mbr =", "= [x.getChild() for x in entries] entry.draw(tree, entries, image, depth + 1) class", "\"no split\" return (RTree.NO_SPLIT, [node]) def rstarInsert(self, entry): leaf_node = self.rstarChooseLeaf(entry) adjust_result =", "logic for determining when to attempt an overlap-minimal split # updated on 2016-11-03", "if such a node exists # def delete(self, E, RN): def findLeaf(self, entry):", "n = 1000 import math for i in xrange(n): upper_left = None lower_right", "y, sides) return intersection_volume def getMarginValue(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() if", "None: raise Exception(\"expected a node to be found for a delete\") # if", "= curr_entry pair = (priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr) ==", "[entry], False) else: split_result = tree.rstarSplitNode(parent, partner_entry) l, ll, e, ee = split_result", "print \"supernode 
#1\" return (RTree.SUPERNODE, [node]) elif split_status == RTree.SUPERNODE: pass # print", "= MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr) # this parent-setting step is crucial # if node.isNonTraditionalLeafNode()", "self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis,", "curr_node = node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M,", "else None if leaf_node == None: raise Exception(\"expected a node to be found", "100) lower_right = (120, 120) \"\"\" denominator = (100 * math.log(100, 2)) **", "self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry): return self.xtreeInsert(entry) def chooseSubtree(self, entry, node): entries =", "= (40, 40) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True)", "None if len(heap) != 0: (priority,item) = heapq.heappop(heap) elif len(internal_node_stack_deque) != 0: item", "entry1, entry2) @staticmethod def rstarPreadjustTree(self, leaf_node): node = leaf_node parent = node.getParent() if", "low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() +", "__init__(self): self.heap = [] def push(self, item, priority): pair = (priority,item) heapq.heappush(self.heap,pair) def", "return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)):", "((262, 221, 872), (500, 279, 1521)), \\ ((332, 886, 493), (822, 1305, 1149)),", "= int(round((20000 * math.log(20000, 2)) ** (1 / 3.0) / denominator)) # for", "tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list = base_mbr.getMBRList() 
+ [mbr] mbr = CompositeMBR(upper_left_point, lower_right_point,", "+ lower_right_points min_components = [] max_components = [] for i in xrange(component_mbr_list[0].getDimension()): components", "in entries] tagged_overlapped_mbr_list = [x for x in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) ==", "for mbr in mbr_list: upper_left = mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1, y1 =", "else: entries = node.getEntries() candidate_entries = None # if node.isLeafNode() == True: candidate_entries", "y: x * y, sides) return area @staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list =", "= max(0, min(comp_a2, comp_b2) - max(comp_a1, comp_b1)) sides.append(side) intersection_volume = reduce(lambda x, y:", "= PythonMagick.Color(0, 65535, 0, 32767) if upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x =", "= (1536 * 0.2) / 2 next_x = multiplier * x next_y =", "False) else: split_result = self.rstarSplitNode(leaf_node, entry) l, ll, e, ee = split_result adjust_result", "return (self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries()) class RTreeEntry: def __init__(self, mbr, child): self.mbr", "note that we don't necessarily need PythonMagick # note that nodes always point", "split_result resulting_entries_from_split = [e, ee] next_root = RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root)", "for i in xrange(base_mbr.getDimension()): components = [x[i] for x in points] min_comp_value =", "candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return chosen_entry", "is_supernode): self.is_supernode = is_supernode def getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension =", "node.getNumChildren() == 0: pass return (RTree.NO_SPLIT, 
[node]) \"\"\" follow = self.chooseSubtree(entry, node).getChild() result", "y2 = int(y1 + random.random() * 100) # z2 = int(z1 + random.random()", "n = 1,000 works in 2.996 sec. for pypy with m = 2", "M, m): result = RTree.rstarGenDistributions(entries, M, m) S_comp_dict = {} for i in", ">= right_value2 if component_does_enclose == False: does_enclose = False break return does_enclose def", "def getMaximumNumEntriesPerNode(self): return self.M def isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return", "else: next_root = RTreeNode(None, [entry1, entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return", "# n = 1,000 works in 2.996 sec. for pypy with m =", "tree.insert(entry6) mbr7 = RawMBR(point7, (110, 200, 100), point7) node7 = RTreeNode(None, [], True)", "1,000 works in 3.428 sec. for pypy with m = 8 and M", "next_y2 = y2 * multiplier + offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2))", "location multiplier = 1 / (1.0 * 6.5) * 0.8 offset = (1536", "= node.getEntries() mbr_list = [x.getMBR() for x in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\"", "a leaf node, it has an actual rectangle # decide whether to include", "entry, node): if node.isLeafNode() == True: if node == self.getRootEntry().getChild(): return node else:", "= None curr_node = node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis =", "in component_mbr_list] points = upper_left_points + lower_right_points min_components = [] max_components = []", "307), (1776, 1597, 501)), \\ ((803, 233, 521), (1314, 717, 1487)), \\ ((660,", "493), (822, 1305, 1149)), \\ ((800, 709, 871), (1390, 1402, 1548)), \\ ((433,", "curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry 
self.split_history_root_dimension = split_history_root_dimension self.is_supernode = is_supernode self.entry = entry", "matching_entry in matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry) # if node is a leaf", "node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() == None: # we are a root", "True else 1 second_priority_component = (-1 if curr_mbr_is_contained == True else 1) *", "[x[0] for x in tagged_area_values] min_area = min(area_values) candidate_tagged_area_values = [x for x", "# to make strong running time estimates; the reason is that # otherwise", "rectangle in conflict x-tree, # add actual rectangles to conflict x-tree, # use", "to insert using order of high-level to low-level # Q = list(set(Q)) Q.reverse()", "= next_y1 radius = 4 perimeter_x = next_x1 perimeter_y = next_y1 + radius", "upper_sorted_entries = entries[ : ] upper_sorted_entries.sort(key = lambda x: x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[", "have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if", "these numbers are for upper-left's in (100, 10100) and # lower-right's in (ul_i,", "1662, 455)), \\ ((596, 892, 131), (1543, 1838, 669)), \\ ((879, 319, 789),", "= min(area_values) candidate_tagged_area_values = [x for x in tagged_area_values if x[0] == min_area]", "node.getNumChildren() <= 1: # raise Exception() node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list =", "y, sides) return area @staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr, mbr] upper_left_points", "# or leaves in r-tree; these times assume \"maximal disjointedness\" # and depth-first", "node = RTreeNode(None, [], True) 
entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) \"\"\" for", "lower_right) self.contained_item = contained_item def isRaw(self): return True @staticmethod def makeMBRFromPoint(point): upper_left =", "return else: # raise Exception() # print \"decision point\" \"\"\" if node.isSuperNode() ==", "else: # raise Exception() # print \"decision point\" \"\"\" if node.isSuperNode() == True:", "log(n)) time; # these times involve n, which is number of actual rectangles", "print \"close ancestor:\", close_ancestor_entry.getMBR().toString() # raise Exception() # for entry in entries[0 :", "result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1) if result == False: return False", "id_value): self.upper_left = upper_left self.lower_right = lower_right self.id_value = id_value def getUpperLeft(self): return", "overall_str_list = None if is_root_node == False: overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list =", "= resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True: partner_node = partner_entry.getChild() partner_entries", "entries of RN to find E.mbr # else: # RN is an internal", "mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension = mbr_a.getDimension() sides = [] for i in", "range(len(window_left_sizes))] window_size_pairs = [x for x in window_size_pairs if x[0] <= M and", "self.toStringHelper(root) def toStringHelper(self, node): if node == None: return \"\" entries = node.getEntries()", "x2 + offset, multiplier * y2 + offset) if depth != 0: pass", "getSize(self): return len(self.heap) import math def getDistance(point1, point2): x1, y1 = point1 x2,", "if curr_mbr_is_contained == True else 1 second_priority_component = (-1 if curr_mbr_is_contained == True", "0, 0, 32767) elif color_choice == 1: color = PythonMagick.Color(0, 0, 65535, 32767)", "len(entry_collection3) < node.getMinimumNumEntriesPerNode() or 
len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False, None, None, dimension) else:", "else: self.xtreeSupernodeInsert(node, [x.getEntry() for x in added_nodes]) # print \"supernode #1\" return (RTree.SUPERNODE,", "x * y, sides) return intersection_volume def getMarginValue(self): upper_left = self.getUpperLeft() lower_right =", "upper_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value", "next_y1, next_x2, next_y2)) if len(entries) == 0: parent = entry.getChild().getParent() mbr = entry.getMBR()", "= RTree.rstarAdjustTree(self, leaf_node, [entry], False) else: split_result = self.rstarSplitNode(leaf_node, entry) l, ll, e,", "right_value1 = self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose = left_value1 <= left_value2 and right_value1", "partner_entry = None if have_resulting_second_entry_from_split == True: first_entry, second_entry = resulting_entries_from_split partner_entry =", "x.getEntry().getMBR().isRaw() == True] keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() ==", "0.8 offset = (1536 * 0.2) / 2 next_x = multiplier * x", "low_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]) for", "RN is a leaf node # search all entries of RN to find", "print \"conflict x-tree:\", conflict_x_tree.toString() # for a well-formed r-tree, this takes O(n *", "item = curr_entry # pair = (priority,item) # if curr_mbr.doesEnclose(reference_mbr) == True or", "z2 = int(z1 + random.random() * 100) x = random.randint(0, 10000) y =", "candidate_distributions = None candidate_distributions = result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y", "[node]) if node.isLeafNode() == True: # split just in case # print \"split\"", "close_ancestor_entry_list: print \"close ancestor:\", 
close_ancestor_entry.getMBR().toString() # raise Exception() # for entry in entries[0", "overall_str def toEntriesArePresentString(self): root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if node", "= upper_left1 == upper_left2 and lower_right1 == lower_right2 return is_equal class RawMBR(MBR): def", "i in xrange(self.getDimension()): comp1 = upper_left[i] comp2 = lower_right[i] side = comp2 -", "RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: return", "partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry], False, False) else: parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent,", "partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) ==", "entry in entries: child = entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str = child_str overall_str_list.append(curr_str)", "# taken from set of actual rectangles for an r-tree; # takes O(n", "!= 0: pass color_choice = depth % 3 color = None if color_choice", "= next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry,", "union_area) # raise Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise Exception() result2 =", "d_S_pairs]) min_S_value_d_S_pair_candidates = [x for x in d_S_pairs if x[1] == min_S_value] chosen_d_S_pair", "that # otherwise the directed graph implied by the r-tree # is not", "self.getUpperLeft() lower_right = self.getLowerRight() result = str(list(upper_left + lower_right) + [self.isRaw()]) return result", "1524, 1378)), \\ ((911, 953, 196), (1776, 1662, 455)), \\ ((596, 892, 131),", 
"upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs = S_comp_dict.items() min_S_value = min([x[1] for x in", "use as priority (prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0: return [] reference_mbr =", "+ 1) if result == False: return False return True def toNumChildrenString(self): root", "= entry.getChild() curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no split\" return (RTree.NO_SPLIT,", "elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): mbr_list = [x.getMBR() in node.getEntries()] curr_x_tree = RTree() overlap_area_sum", "if have_node_str == True: curr_leaf_status = \"-\" if (node.getParent() == None or (node.getParent()", "def getNodesHelper(self, node, partial_result): partial_result.append(node) for curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self,", "leaf is a non-traditional leaf leaf_node = child_node.getParent() if entry != self.getRootEntry() else", "entry = None \"\"\" if node.getParent() == None: entry = tree.getRootEntry() else: entry", "priority queue curr_node = curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained =", "[] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry) return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest,", "rectangles are distinct # return a list of entries def getRectangleCloseDescendants(self, reference_entry): #", "at root instead of non-existent parent of root; # also, we implement delete();", "mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else:", "matching_combined_area_tagged_next_candidate_distributions = [x for x in 
combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value] next_next_candidates =", "False: node.setToSuperNode(True) # questionable if this is really necessary for entry in entries:", "15]: for entry in entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent() == None: raise Exception()", "= node.getEntries() entry = None \"\"\" if node.getParent() == None: entry = tree.getRootEntry()", "root = self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if node == None: return", "node.getEntries() entry = node.getParent().retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list =", "== True: do_overlap = do_overlap and comp_a1 < comp_b2 and comp_a2 > comp_b1", "(priority,item) = heapq.heappop(heap) elif len(internal_node_stack_deque) != 0: item = internal_node_stack_deque.popleft() # (priority,item) =", "= node.getEntries() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m)", "split_history_root_dimension = None, is_supernode = False): self.parent = parent self.is_leaf = is_leaf self.m", "entries, entry): mbr = entry.getMBR() tagged_mbr_list = [] for curr_entry in entries: base_mbr", "\"no split\" return (RTree.NO_SPLIT, [node]) if node.isLeafNode() == True: # split just in", "upper-left's in (100, 10100) and # lower-right's in (ul_i, ul_i + 10000) #", "!= curr_depth: return False else: return True else: for curr_node in node.getChildren(): result", "= self.findLeaf(entry) child_node = entry.getChild() # root node never has a raw mbr", "#2\" return (RTree.SPLIT, [node1, node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() for x in added_nodes]) #", "remove the root # set as new root its only child pass def", "mbr3 = RawMBR(point3, (110, 200, 100), point3) node3 = RTreeNode(None, [], True) entry3", "1542)), \\ ((358, 815, 372), (761, 1089, 594)), \\ ((294, 238, 1036), (785,", 
"[x[1] for x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self, entry):", "RN to find E.mbr # else: # RN is an internal node #", "xtreeSupernodeInsert(self, node, entries): if node.isSuperNode() == False: node.setToSuperNode(True) # questionable if this is", "x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split):", "= [] self.getNodesHelper(node, node_list) return node_list \"\"\" def getUnionArea(self): pass \"\"\" # takes", "x in curr_entries] mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list)", "isSuperNode(self): return self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode = is_supernode def getSplitHistoryRootDimension(self): return self.split_history_root_dimension", "== mbr.getLowerRight() result = upper_left_matches == True and lower_right_matches == True return result", "and comp_a1 <= comp_b2 and comp_a2 >= comp_b1 if do_overlap == False: break", "ll, e, ee = split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True) else: return", "mbr, entry, partial_result, without_borders): if entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders) ==", "area_change @staticmethod def doOverlap(mbr_a, mbr_b, without_borders = False): upper_left_a = mbr_a.getUpperLeft() lower_right_a =", "[x.getMBR() for x in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for curr_entry", "# print \"decision point\" \"\"\" if node.isSuperNode() == True: # print \"supernode encountered\"", "# raise Exception() # print \"decision point\" \"\"\" if node.isSuperNode() == True: #", "offset radius = 2 perimeter_x = next_x 
+ offset perimeter_y = next_y +", "denominator = (100 * math.log(100, 2)) ** (1 / 3.0) k = 1", "number of actual rectangles or leaves; # assumes that rectangles are distinct def", "comp_b2 = max(upper_left_b[i], lower_right_b[i]) # print comp_a1, comp_a2, comp_b1, comp_b2 # do_overlap =", "self.lower_right = lower_right def isRaw(self): return False def isComposite(self): return False def getUpperLeft(self):", "if do_fail == True or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return", "entry.getMBR().isRaw() == True: # print mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else:", "children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for x in", "toLeafStatusString(self): root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if node == None:", "for curr_entry in entry_group2: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2)", "ignore node if associated mbr does not enclose reference mbr # and associated", "seconds (~197x slower for 100x growth; expected 664x slower) # n = 14500", "== 0: parent = entry.getChild().getParent() mbr = entry.getMBR() location = Point.toPoint(mbr) x, y", "= node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception() if node.getNumChildren() <= 1: # raise", "\") + \")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict = {} for", "[], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) \"\"\" for i in xrange(10):", "intersection query def doOverlapQuery(self, mbr, without_borders = False): partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(),", "lambda x: x.getMBR().getUpperLeft()[i]) 
low_distributions = [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for j", "= entry.getChild().getParent() mbr = entry.getMBR() location = Point.toPoint(mbr) x, y = location multiplier", "class MBR: def __init__(self, upper_left, lower_right): self.upper_left = upper_left self.lower_right = lower_right def", "entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent, entry_collection2, None, entry2) entry2.setChild(node2) for", "= self.findLeafHelper(entry, curr_node) if result == None: continue else: return curr_node return None", "# z1 = int(100 + random.randint(0, k) * 100) # x2 = int(x1", "root.getEntries() chosen_entry = entries[0] chosen_child = chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if RN is", "1: upper_left = (20, 20) lower_right = (40, 40) elif i % 4", "\"\" entries = node.getEntries() children = node.getChildren() have_node_str = True overall_str_list = None", "def condenseTree(self, leaf_node): Q = [] self.condenseTreeHelper(leaf_node, Q) # Q is in order", "RTreeNode(parent, entry_group1, prev_leaf_status) node2 = RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry in entry_group1: curr_entry.getChild().setParent(node1)", "in tagged_area_values] min_area = min(area_values) candidate_tagged_area_values = [x for x in tagged_area_values if", "* 0.2) / 2 x1 = 0 y1 = 0 x2 = 47", "with n # n = 100 # 0.427 seconds (~1x slower for 1x", "= 2 * (x2 - x1) + 2 * (y2 - y1) return", "next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum / (1.0 * union_area) if", "radius = 4 perimeter_x = next_x1 perimeter_y = next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y,", "parent self.is_leaf = is_leaf self.m = 8 self.M = 16 self.child_to_entry_dict = {}", "0.8 # offset = (768 * 0.2) / 2 offset = (1536 *", "!= 1: candidate_entries = 
self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return", "for curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node): node_list = [] self.getNodesHelper(node,", "this is idempotent for added_node in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status == RTree.SPLIT:", "depth-first stack for internal nodes and # best-first priority queue for leaf nodes", "131), (1543, 1838, 669)), \\ ((879, 319, 789), (1877, 744, 791)), \\ ((1081,", "left_value1 <= left_value2 and right_value1 >= right_value2 if component_does_enclose == False: does_enclose =", "entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 =", "we don't necessarily need PythonMagick # note that nodes always point to same", "low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]])", "adjustTree(); # fixed bug with parent pointers for xtreeInsert(); # have supernode demotion", "<= M and (len(entries) - x) >= m] window_size_pairs = [(window_left_sizes[i], len(entries) -", "[((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(), x[1])", "add children to priority queue, # ignore if contained rectangle is contained by", "# questionable if this is really necessary for entry in entries: curr_node =", "\"\"\" @staticmethod def adjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node == None: return", "overlap means maximal disjointedness # is not going to be good enough to", "100) # x2 = int(x1 + random.random() * 100) # y2 = int(y1", "14500 # 170.053 seconds (~398x slower for 
145x growth; expected 1040x slower) #", "= random.randint(0, 10000) y = random.randint(0, 10000) # upper_left = (x1, y1, z1)", "pair = (priority,item) heapq.heappush(heap,pair) # print entry_pq # raise Exception() result_entry_list = []", "parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry], False, False) else: parent.addEntry(entry) entry.getChild().setParent(parent)", "associated entry in result; # if we made it this far, we should", "is_first_call_after_first_pass): if node == None: return (False, []) else: parent = node.getParent() curr_entries", "mbr_collection2 = [x.getMBR() for x in entry_collection2] # this line presumes that we", "color = PythonMagick.Color(0, 65535, 0, 32767) if upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x", "print \"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE = 1 NO_SPLIT =", "comp1 = upper_left[i] comp2 = lower_right[i] side = comp2 - comp1 sides.append(side) area", "next_mbr = raw_mbr.clone() next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry)", "= node.getParent().retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for", "RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO = 0.2 def", "RawMBR(point4, (110, 200, 100), point4) node4 = RTreeNode(None, [], True) entry4 = RTreeEntry(mbr4,", "entry_collection2, None, entry2) entry2.setChild(node2) for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2:", "None: entry = tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry = 
parent.retrieveEntryForChild(node) children", "resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() == None: entry = tree.getRootEntry() curr_entries = entry.getChild().getEntries() children", "partial_result, without_borders) # returns entries def doEnclosureQuery(self, mbr): partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(),", "pypy with m = 8 and M = 16 # n = 6,000", "in component_mbr_list] lower_right_points = [x.getLowerRight() for x in component_mbr_list] points = upper_left_points +", "tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry = parent.retrieveEntryForChild(node) children = [x.getChild() for", "root_mbr.getArea() first_priority_component = 0 if root_mbr_is_contained == True else 1 second_priority_component = (-1", "# decide whether to include associated entry in result; # if we made", "2 and M = 4 # n = 1,000 works in 2.996 sec.", "conflict x-tree returns entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry in matching_entries: # raise", "10) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry =", "self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches = self.getLowerRight() == mbr.getLowerRight() result = upper_left_matches == True", "# Q = list(set(Q)) Q.reverse() for curr_node in Q: curr_entry = curr_node.getEntry() #", "def getRootEntry(self): return self.root_entry def setRootEntry(self, root_entry): self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root", "* 100) # z1 = int(100 + random.randint(0, k) * 100) # x2", "this far, we should add to conflict x-tree result_entry_list.append(entry) raw_mbr = mbr next_mbr", "leaf_node == None: raise Exception(\"expected a node to be found for a delete\")", "self.insert(curr_entry) def condenseTreeHelper(self, node, Q): # demote super-node if necessary if node.isSuperNode() ==", "mbr_b.getLowerRight() 
dimension = mbr_a.getDimension() sides = [] for i in xrange(dimension): comp_a1 =", "in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() # raise Exception() # for entry in", "parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root = RTreeNode(None, [entry1, entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry())", "instead of non-existent parent of root; # also, we implement delete(); note that", "* log(n)) time at worst; # and to add all-start-rectangles close-ancestor finding, #", "= (low_distributions, upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod def rstarChooseSplitAxis(entries, M, m): result =", "int(round((14500 * math.log(14500, 2)) ** (1 / 3.0) / denominator)) # for n", "child_str = self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \"", "len(entries) - window_left_sizes[i]) for i in range(len(window_left_sizes))] window_size_pairs = [x for x in", "i in xrange(dimension): comp_a1 = upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2", "65535, 32767) elif color_choice == 2: color = PythonMagick.Color(0, 65535, 0, 32767) if", "= RTree() import random entries = [] # lower_rights = [(3, 10, 10),", "upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue() +", "mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension = mbr_a.getDimension() sides = []", "= [x.getMBR() for x in entry_collection2] # this line presumes that we have", "# if node.isLeafNode() == True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) != 1:", "= 
CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None if have_resulting_second_entry_from_split == True: first_entry, second_entry =", "overlap logic for determining when to attempt an overlap-minimal split # updated on", "determining when to attempt an overlap-minimal split # updated on 2016-11-03 to re-structure", "entry_group2, prev_leaf_status) for curr_entry in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1", "operation # to find the entry containing node; just look at parent of", "x.getMBR().getLowerRight()[i]) upper_distributions = [(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))]", "E_overall = list(set(curr_node.getEntries() + [entry])) return self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self, node, E_overall,", "[x[1] for x in pair_list] return items def getSize(self): return len(self.heap) import math", "== False and reference_mbr.doesEnclose(mbr) == False: # ignore node if associated mbr does", "3.0) k = 1 # k = int(round(denominator / denominator)) # for n", "= [x for x in window_size_pairs if x[0] <= M and x[0] >=", "upper_left_points = [x.getUpperLeft() for x in component_mbr_list] lower_right_points = [x.getLowerRight() for x in", "math def getDistance(point1, point2): x1, y1 = point1 x2, y2 = point2 change_x", "and we have cliques # note that we don't necessarily need PythonMagick #", "is_equal = upper_left1 == upper_left2 and lower_right1 == lower_right2 return is_equal class RawMBR(MBR):", "Q = [] self.condenseTreeHelper(leaf_node, Q) # Q is in order of low-level to", "are a root node if self.getRootEntry().getChild().getNumChildren() == 0: root_node = RTreeNode(None, [], True)", "root.getNumChildren() == 1: # shorten tree entries = root.getEntries() chosen_entry = entries[0] chosen_child", "== 1: color = 
PythonMagick.Color(0, 0, 65535, 32767) elif color_choice == 2: color", "node.isLeafNode() prev_leaf_status = None curr_node = node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode()", "self.findLeafHelper(entry, curr_node) if result == None: continue else: return curr_node return None \"\"\"", "comp_b2 = lower_right_b[i] side = max(0, min(comp_a2, comp_b2) - max(comp_a1, comp_b1)) sides.append(side) intersection_volume", "safe path to a leaf where the leaf mbr # is not contained", "close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def draw(self): # im = Image.new(\"RGB\", (512,", "tree.doContainmentQuery(curr_mbr3) # raise Exception() print tree.doOverlapQuery(curr_mbr2) # raise Exception() print tree.toString() # tree.delete(entry1)", "True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() == 0: pass return (RTree.NO_SPLIT, [node]) \"\"\"", "curr_node.setParent(node) \"\"\" entries = node.getEntries() mbr_list = [x.getMBR() for x in entries] tight_overall_mbr", "200, 100), point6) node6 = RTreeNode(None, [], True) entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6)", "curr_mbr_is_contained == True else 1) * curr_mbr_area # min-pq # priority = (first_priority_component,", "things going on - saturation occurs # if we increase n and do", "that we assume rectangles are unique for close-descendant # and close-ancestor finding; the", "True and is_first_call_after_first_pass != True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children =", "k = int(round((5500 * math.log(5500, 2)) ** (1 / 3.0) / denominator)) #", "= [x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) 
partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split ==", "to re-structure and modify adjustTree(); # stop at root instead of non-existent parent", "True or reference_mbr.doesEnclose(curr_mbr) == True: # heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x: x[0], reverse", "== True: return True else: entries = entry.getChild().getEntries() for curr_entry in entries: if", "else: do_overlap = do_overlap and comp_a1 <= comp_b2 and comp_a2 >= comp_b1 if", "if node.getParent() == None: entry = tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry", "RawMBR(point3, (110, 200, 100), point3) node3 = RTreeNode(None, [], True) entry3 = RTreeEntry(mbr3,", "\"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return chosen_entry def xtreeInsert(self, entry): #", "= int(round((5500 * math.log(5500, 2)) ** (1 / 3.0) / denominator)) # for", "node1.setEntry(entry1) node2.setEntry(entry2) if parent != None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node !=", "@staticmethod def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for x in component_mbr_list] lower_right_points = [x.getLowerRight()", "self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if node == None: return \"\" entries = node.getEntries()", "if len(candidate_entries) != 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) != 1: candidate_entries", "class CompositeMBR(MBR): def __init__(self, upper_left, lower_right, mbr_list): MBR.__init__(self, upper_left, lower_right) self.mbr_list = mbr_list", "\\ ((225, 359, 290), (579, 950, 700)), \\ ((297, 196, 750), (1085, 718,", "term1 = comp_1b - comp_1a for j in xrange(i + 1, self.getDimension()): comp_2a", "tree.getRootEntry().getChild() # tree2.draw() print 
len(tree2.getNodes()) import time time1 = time.time() result = tree2.getAllRectangleCloseAncestors()", "finding, # which for a well-formed r-tree, takes O(n * log(n)) time; #", "= curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(), x[1]) for", "def hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if node == None: return elif node.isLeafNode() ==", "do_overlap == False: break return do_overlap @staticmethod def findOverlapArea(mbr_a, mbr_b): if MBR.doOverlap(mbr_a, mbr_b)", "self.getRootEntry()) return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr)", "1259)), \\ ((808, 926, 151), (889, 1755, 320)), \\ ((945, 260, 1091), (1932,", "None: return (None, None, None, True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode()", "= [([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]) for x", "min(upper_left_b[i], lower_right_b[i]) # b \"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i]) # print comp_a1, comp_a2,", "node.setToSuperNode(True) # questionable if this is really necessary for entry in entries: curr_node", "if have_resulting_second_entry_from_split == True and is_first_call_after_first_pass != True: partner_node = partner_entry.getChild() partner_entries =", "= self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4, dimension, do_fail = result2 # raise Exception() if", "node.getEntry().setMBR(mbr) # print \"no split\" return (RTree.NO_SPLIT, [node]) if node.isLeafNode() == True: #", "return node else: return node.getParent() else: entries = node.getEntries() candidate_entries = None #", "RTreeNode(None, [entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) 
node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2)", "CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(), x[1]) for", "# print \"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT = 0 SUPERNODE = 1 NO_SPLIT", "def xtreeSplitNode(self, node, entry): # we never split a super-node if node.isSuperNode() ==", "node.getEntries() axis = node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 =", "else: pass MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self, node, entry): # we never split", "= leaf_node parent = node.getParent() if parent != None: curr_entries = node.getEntries() entry", "M, m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis, False) return", "@staticmethod def makeMBRFromPoint(point): upper_left = point lower_right = point result_mbr = RawMBR(upper_left, lower_right,", "candidate_tagged_enlargement_values] return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in", "= node @staticmethod def draw(tree, entries, image, depth): for entry in entries: RTreeEntry.drawHelper(tree,", "entry containing node; just look at parent of entry child if curr_entry.getMBR().isRaw() ==", "for x in tagged_enlargement_values if x[0] == min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x", "\"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toEntriesArePresentString(self): root =", "upper_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]) for", "self.id_value class Point: def __init__(self, vec, id_value): self.vec = vec self.id_value = id_value", "= node.getEntries() entry = 
parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list", "def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree,", "str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry in entries: child", "node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1) if result == False: return", "going on - saturation occurs # if we increase n and do not", "RTreeNode(None, [], True) entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6, (110,", "@staticmethod def doOverlap(mbr_a, mbr_b, without_borders = False): upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight()", "== True: have_node_str = True overall_str_list = None if is_root_node == False: overall_str_list", "O(n * log(n)) time at worst; # and to add all-start-rectangles close-ancestor finding,", "without_borders) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if", "10100) and # lower-right's in (ul_i, ul_i + 10000) # two strange things", "= 100 # k = int(round((1000 * math.log(1000, 2)) ** (1 / 3.0)", "2: color = PythonMagick.Color(0, 65535, 0, 32767) if upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color)", "return (False, []) \"\"\" # assume item is in tree # returns a", "x, y = location multiplier = 1 / (1.0 * 6.5) * 0.8", "enclosing mbr exists in conflict x-tree continue if entry == ignore_entry: # ignore", "node1.setParent(next_root) node2.setParent(next_root) else: parent.removeEntry(node.getEntry()) parent.addEntry(entry1) parent.addEntry(entry2) # print \"split #2\" return (RTree.SPLIT, [node1,", "if 
component_does_enclose == False: does_enclose = False break return does_enclose def isEqualTo(self, mbr):", "offset, multiplier * y2 + offset) if depth != 0: pass color_choice =", "x[0] == min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def", "def getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries() def setParent(self, node): self.parent =", "+ offset perimeter_y = next_y + offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y))", "curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry in entry_group2: next_curr_node = curr_entry.getChild() if curr_entry != entry:", "self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m)", "return a list of entries def getRectangleCloseDescendants(self, reference_entry): # repeatedly pop nodes, prune", "else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry in entries:", "a delete\") # if parent has zero entries after removing this entry, this", "True) root_mbr = CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def", "ll, e, ee = split_result resulting_entries_from_split = [e, ee] next_root = RTreeNode(None, resulting_entries_from_split,", "entry with mbr:\", entry.getMBR().toString() # print \"tree, currently:\", tree.toString() # tree2.delete(entry) pass #", "0), (110, 200, 100), None) print tree.doContainmentQuery(curr_mbr3) # raise Exception() print tree.doOverlapQuery(curr_mbr2) #", "(1536 * 0.2) / 2 next_x1, next_y1 = (multiplier * x1 + offset,", "= [x.getUpperLeft() for x in mbr_list] lower_right_points = [x.getLowerRight() for x in mbr_list]", 
"root_mbr = root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component", "pair = (priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item) = heapq.heappop(self.heap) return item def isEmpty(self):", "= (-1 if curr_mbr_is_contained == True else 1) * curr_mbr_area # min-pq #", "isEmpty(self): return len(self.heap) == 0 def peek(self): heap = self.heap pair = heap[0]", "= S_comp_dict.items() min_S_value = min([x[1] for x in d_S_pairs]) min_S_value_d_S_pair_candidates = [x for", "upper_left_matches = self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches = self.getLowerRight() == mbr.getLowerRight() result = upper_left_matches", "getChild(self): return self.child def setChild(self, node): self.child = node @staticmethod def draw(tree, entries,", "sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for", "works import sys # import PythonMagick import heapq from collections import deque #", "return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else:", "== 2: x1, y1 = upper_left x2, y2 = lower_right margin = 2", "n = 20000 # k = int(round((14500 * math.log(14500, 2)) ** (1 /", "lower_right = (40, 40) elif i % 4 == 2: upper_left = (60,", "getRectangleCloseDescendants(self, reference_entry): # repeatedly pop nodes, prune using enclosure/containment # w.r.t. 
reference rectangle,", "priority_tagged_internal_entry in priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item) # print", "# split just in case # print \"split\" return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode()", "entry): # we never split a super-node if node.isSuperNode() == True: # raise", "# returns entries def doContainmentQuery(self, mbr): partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return", "from set of actual rectangles for an r-tree; # takes O(n * log(n))", "= 2000 # n = 1000 # n = 20000 n = 1000", "+ 1)] window_left_sizes = [x for x in window_left_sizes if x <= M", "parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry], False, False) else: parent.addEntry(entry)", "mbr @staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area", "node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry) was_successful, entry_collection1, entry_collection2, dimension = split_result", "getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self): return self.M def isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode()", "if root.getNumChildren() == 1: # shorten tree entries = root.getEntries() chosen_entry = entries[0]", "** (1 / 3.0) / denominator)) # for n = 10000 # k", "# y1 = int(100 + random.randint(0, k) * 100) # z1 = int(100", "sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs = S_comp_dict.items() min_S_value = min([x[1]", "curr_node = root depth = 0 while curr_node.isLeafNode() == False: curr_node 
= curr_node.getChildren()[0]", "# x2 = int(x1 + random.random() * 100) # y2 = int(y1 +", "CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x", "None: # we are a root node if self.getRootEntry().getChild().getNumChildren() == 0: root_node =", "candidate_entries def resolveEnlargementTie(self, entries, entry): mbr = entry.getMBR() tagged_mbr_list = [] for curr_entry", "and (len(entries) - x) >= m] window_size_pairs = [(window_left_sizes[i], len(entries) - window_left_sizes[i]) for", "call algorithm condenseTree(L) # if the root has only one child (and it", "(len(entries) - x) <= M and (len(entries) - x) >= m] window_size_pairs =", "entry): mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry = entries[i] curr_mbr =", "(3, 10, 10)] # for i in xrange(10): # for i in xrange(4):", "nodes can temporarily look like leaf nodes # keep_nodes = [x for x", "k = int(round((10000 * math.log(10000, 2)) ** (1 / 3.0) / denominator)) #", "# while len(heap) != 0: while len(internal_node_stack_deque) != 0 or len(heap) != 0:", "self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry =", "= [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] keep_nodes = [x", "entries.append(entry) \"\"\" for i in xrange(10): upper_left = (20, 20) lower_right = (40,", "lower_right_points = [x.getLowerRight() for x in component_mbr_list] points = upper_left_points + lower_right_points min_components", "0), (100, 100, 0), HyperRectangle((50, 50, 0), (100, 100, 0), 1)) tree =", "200x growth; expected 1528x slower) # n = 2000 # n = 1000", "tree.insert(entry2) mbr3 = RawMBR(point3, (110, 200, 100), point3) node3 = RTreeNode(None, [], True)", "import sys # import PythonMagick import heapq from 
collections import deque # min-pq", "(and it is not a leaf) # remove the root # set as", "= lower_right self.id_value = id_value def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right", "None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry def", "[(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] upper_sorted_entries = entries[", "RTree.rstarAdjustTree(self, leaf_node, [entry], False) else: split_result = self.rstarSplitNode(leaf_node, entry) l, ll, e, ee", "toString(self): return str(self.getEntries()) class RTreeEntry: def __init__(self, mbr, child): self.mbr = mbr self.child", "== True: # print mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True: partial_result.append(entry) else: entries", "= entry.getChild() mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr) == False:", "return mbr def doesMatch(self, mbr): upper_left_matches = self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches = self.getLowerRight()", "min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis, M, m): result", "lower_right = (120, 120) \"\"\" denominator = (100 * math.log(100, 2)) ** (1", "False) else: return (False, []) \"\"\" # assume item is in tree #", "= e.getChild() ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild())", "+ offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children = [x.getChild() for x", "chosen_child = chosen_entry.getChild() return 
self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def", "(110, 200, 100), point8) node8 = RTreeNode(None, [], True) entry8 = RTreeEntry(mbr8, node8)", "parent set correctly for a leaf, # which is not the case when", "1309)), \\ ((225, 359, 290), (579, 950, 700)), \\ ((297, 196, 750), (1085,", "= (priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item) = heapq.heappop(self.heap) return item def isEmpty(self): return", "* y1 + offset) next_x2, next_y2 = (multiplier * x2 + offset, multiplier", "mbr8 = RawMBR(point8, (110, 200, 100), point8) node8 = RTreeNode(None, [], True) entry8", "== True] keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True]", "0 else [node.getEntry().getMBR().toString()] # overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString(),", "parent.addEntry(entry) entry.getChild().setParent(parent) split_result = tree.splitNode(parent, partner_entry) l, ll, e, ee = split_result return", "return True return False # returns entries def doContainmentQuery(self, mbr): partial_result = []", "this, as internal nodes can temporarily look like leaf nodes # keep_nodes =", "parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: split_result = tree.rstarSplitNode(parent, partner_entry) l,", "\"+\" overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry in entries: child", "\"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries,", "[x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2] mbr1", "result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, 
TopicKNearest, k): def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list,", "correctly and add to priority queue curr_node = curr_entry.getChild() curr_mbr = curr_entry.getMBR() curr_mbr_is_actual", "= node.getParent() curr_entries = node.getEntries() entry = None if node.getParent() == None: entry", "first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True: partner_node =", "comp_b1 = min(upper_left_b[i], lower_right_b[i]) # b \"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i]) # print", "def setChild(self, node): self.child = node @staticmethod def draw(tree, entries, image, depth): for", "= lower_right margin = 2 * (x2 - x1) + 2 * (y2", "entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() # raise Exception() #", "1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return", "is idempotent for added_node in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status == RTree.SPLIT: #", "else: split_result = self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e, ee = split_result resulting_entries_from_split =", "None: return (False, []) else: parent = node.getParent() curr_entries = node.getEntries() entry =", "+ 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: split_result", "RTreeNode(None, [], True) entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 = RawMBR(point8, (110,", "difference:\", time_diff, \"seconds\" # raise Exception() for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list 
=", "entry matches the ignore entry continue if node.isLeafNode() == True: # could have", "x2, y2 = lower_right margin = 2 * (x2 - x1) + 2", "= ul_lr_pairs[i][1] # x = int(random.randint(1, 100)) # y = 10 # z", "= tagged_overlapped_mbr curr_node = curr_entry.getChild() result = self.findLeafHelper(entry, curr_node) if result == None:", "for x in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR()", "partner_entries = partner_node.getEntries() partner_children = [x.getChild() for x in partner_entries] partner_mbr_list = [x.getMBR()", "x in entries] tagged_overlapped_mbr_list = [x for x in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0])", "n = 1000 # n = 20000 n = 1000 import math for", "print \"hello\" did_find_leaf = self.findLeaf(entry) child_node = entry.getChild() # root node never has", "point) return result_mbr def getContainedItem(self): return self.contained_item def getMBRList(self): return [self] def clone(self):", "\"\"\" if entry.getChild().getParent() == None: raise Exception() \"\"\" # print tree.toString() # for", "for i in xrange(4): \"\"\" ul_lr_pairs = [((797, 989, 602), (910, 1248, 1035)),", "tree.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for x in curr_entries] mbr_list =", "and do not increase domains and # high inter-group overlap means maximal disjointedness", "resulting_entries_from_split l = e.getChild() ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode():", "False: return False return True def toNumChildrenString(self): root = self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def", "lower_right, mbr_list): MBR.__init__(self, upper_left, lower_right) self.mbr_list = mbr_list def getMBRList(self): return self.mbr_list def", "= [] for entry in entries: child = entry.getChild() child_str = 
self.toLeafStatusStringHelper(child) curr_str", "for x in self.getEntries()]) is_leaf_node = self.getNumChildren() == 0 return is_leaf_node def addEntry(self,", "priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item = curr_entry # pair = (priority,item)", "mbr = entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr) # this parent-setting step is", "= [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict", "True def toNumChildrenString(self): root = self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if node", "# for entry in entries[0 : 15]: for entry in entries: tree2.insert(entry) \"\"\"", "n = 20000 n = 1000 import math for i in xrange(n): upper_left", "len(heap) != 0: # entry = entry_pq.pop() item = None if len(heap) !=", "= self.getNumEntries() == 0 return is_traditional_leaf_node \"\"\" def isLeafNode(self): # is_leaf_node = (self.getParent()", "= None if node.getParent() == None: entry = tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node)", "multiplier = 3 * 0.8 # offset = (768 * 0.2) / 2", "Exception() print tree.doOverlapQuery(curr_mbr2) # raise Exception() print tree.toString() # tree.delete(entry1) print tree.toString() #", "that our tree # has entry-aware nodes; made bug fix for adjustTree(); #", "we made it this far, we should add children to priority queue entries", "True: mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR() for x", "+ \")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i", "entries: child = entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str = child_str 
overall_str_list.append(curr_str) overall_str =", "= mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal = upper_left1 == upper_left2 and lower_right1 ==", "child = entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\"", "disjointedness\" # and depth-first stack for internal nodes and # best-first priority queue", "self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, self.getRootEntry()) return result def", "center_y = next_y1 radius = 4 perimeter_x = next_x1 perimeter_y = next_y1 +", "pass \"\"\" node1 = RTreeNode(parent, entry_group1, prev_leaf_status) node2 = RTreeNode(parent, entry_group2, prev_leaf_status) for", "curr_node in node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1) if result ==", "print \"mbr:\", curr_entry.getMBR().toString() # print \"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node, Q): #", "== True: return True return False # returns entries def doContainmentQuery(self, mbr): partial_result", "[] if len(entries) > (M + 1): raise Exception() window_left_sizes = [m -", "curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry in entry_group2: next_curr_node = curr_entry.getChild() if", "E is found # remove E from L # call algorithm condenseTree(L) #", "raise Exception() if do_fail == True or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or len(entry_collection4) <", "overall_str_list = None if have_node_str == True: curr_depth = \"-\" if node.getNumEntries() !=", "result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result parent = curr_node.getParent()", "50, 0), (100, 100, 0), 1)) tree = RTree() print tree.toString() curr_root =", 
"[x.getLowerRight() for x in mbr_list] points = upper_left_points + lower_right_points min_components = []", "node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6, (110, 200, 100), point6) node6 = RTreeNode(None,", "have_node_str == True: curr_leaf_status = \"-\" if (node.getParent() == None or (node.getParent() !=", "(100, 10100) and # lower-right's in (ul_i, ul_i + 10000) # two strange", "k = int(round((1000 * math.log(1000, 2)) ** (1 / 3.0) / denominator)) #", "node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3, (110, 200, 100), point3) node3 = RTreeNode(None,", "import heapq from collections import deque # min-pq class PriorityQueue: def __init__(self): self.heap", "10, 10), (8, 10, 10), (6, 10, 10), (9, 10, 10), (6, 10,", "overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4, dimension,", "if mbr1.isEqualTo(mbr2) == True: overlap_ratio = 1 else: overlap_ratio = 0 else: overlap_ratio", "if node.getParent() == None: # we are a root node if self.getRootEntry().getChild().getNumChildren() ==", "entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent, entry_collection2, None, entry2) entry2.setChild(node2)", "root_entry = self.getRootEntry() root_node = root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained", "isLeafNode(self): # is_leaf_node = (self.getParent() == None and self.getNumChildren() == 0) or (self.getNumChildren()", "we implement delete(); note that our tree # has entry-aware nodes; made bug", "* curr_mbr_area # min-pq # priority = (first_priority_component, second_priority_component) if curr_mbr.isRaw() == True:", "RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False:", "return 
overall_str def toEntriesArePresentString(self): root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if", "entries: curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension = split_history_root_dimension self.is_supernode = is_supernode", "= RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) \"\"\" for i", "node1) entry2 = RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent != None: original_entry =", "CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry =", "upper_left = self.getUpperLeft() lower_right = self.getLowerRight() contained_item = self.getContainedItem() mbr = RawMBR(upper_left, lower_right,", "== True: if depth != curr_depth: return False else: return True else: for", "= None if leaf_node.isFull() == False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self, leaf_node, [entry],", "= max(upper_left_a[i], lower_right_a[i]) # b \"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i]) # b \"right\"", "start_rectangle_entry in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict", "out close descendant candidates on occasion, # if containment query for conflict x-tree", "def draw(self): # im = Image.new(\"RGB\", (512, 512), \"white\") \"\"\" im = Image.new(\"RGB\",", "None: original_entry = parent.retrieveEntryForChild(curr_node) parent.removeEntry(original_entry) if node != 
self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent)", "# if node.isNonTraditionalLeafNode() == False: # this is idempotent for added_node in added_nodes:", "[x.getMBR() for x in entry_collection2] # this line presumes that we have parent", "ll, e, ee = split_result return tree.adjustTree(tree, l, [e, ee], True, False) else:", "for start rectangle taken from set of actual rectangles # for an r-tree", "+ offset radius = 2 perimeter_x = next_x + offset perimeter_y = next_y", "an actual rectangle # decide whether to include associated entry in result; #", "def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque = deque() #", "return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: split_result = tree.rstarSplitNode(parent, partner_entry) l, ll, e,", "curr_mbr = curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component", "containing node; just look at parent of entry child if curr_entry.getMBR().isRaw() == True:", "in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue()", "for x in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i] = S_comp_value", "curr_node) if result == None: continue else: return curr_node return None \"\"\" #", "+ 1) class MBR: def __init__(self, upper_left, lower_right): self.upper_left = upper_left self.lower_right =", "return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node, depth): if node == None: return \"\"", "chosen_entry def xtreeInsert(self, entry): # print \"insert\" return self.xtreeInsertHelper(entry, 
self.getRootEntry().getChild()) SPLIT = 0", "((803, 1054, 307), (1776, 1597, 501)), \\ ((803, 233, 521), (1314, 717, 1487)),", "min(upper_left_a[i], lower_right_a[i]) # a \"right\" comp_a2 = max(upper_left_a[i], lower_right_a[i]) # b \"left\" comp_b1", "Exception() # for entry in entries[0 : 15]: for entry in entries: #", "= child_node.getParent() if entry != self.getRootEntry() else None if leaf_node == None: raise", "next_y1 radius = 4 perimeter_x = next_x1 perimeter_y = next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x,", "= 10000 # 84.222 seconds (~197x slower for 100x growth; expected 664x slower)", "has a raw mbr # leaf is a non-traditional leaf leaf_node = child_node.getParent()", "fix margin calculation # note that we assume rectangles are unique for close-descendant", "contained_item) return mbr def doesMatch(self, mbr): upper_left_matches = self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches =", "else: for curr_node in node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1) if", "point6) node6 = RTreeNode(None, [], True) entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7", "[] for entry in entries: child = entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str =", "return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if node == None: return \"\" entries =", "True: if entry == curr_entry: return True else: return False else: entries =", "# for start rectangle taken from set of actual rectangles # for an", "def findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry, curr_entry): \"\"\" if node.isLeafNode()", "if entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True: partial_result.append(entry) else: entries", "else: return node.getParent() else: entries = node.getEntries() 
candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry) if len(candidate_entries)", "str(node)] for entry in entries: child = entry.getChild() child_str = self.toStringHelper(child) curr_str =", "RTreeEntry: def __init__(self, mbr, child): self.mbr = mbr self.child = child def getMBR(self):", "toStringHelper(self, node): if node == None: return \"\" entries = node.getEntries() children =", "+ 2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree, parent, [entry], False,", "MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change = enlarged_mbr_area - base_mbr_area return area_change @staticmethod", "not going to be good enough to cut down branches explored; # to", "deque # min-pq class PriorityQueue: def __init__(self): self.heap = [] def push(self, item,", "if RN is a leaf node # search all entries of RN to", "return elif node.isLeafNode() == True: if depth != curr_depth: return False else: return", "((879, 319, 789), (1877, 744, 791)), \\ ((1081, 1056, 1020), (1708, 1075, 1542)),", "of high-level to low-level # Q = list(set(Q)) Q.reverse() for curr_node in Q:", "@staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def", "if node.isNonTraditionalLeafNode() == True: if node.isLeafNode() == True and node == self.getRootEntry().getChild(): node.addEntry(entry)", "and x >= m and (len(entries) - x) <= M and (len(entries) -", "= tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110, 200, 100), point1) node1 = RTreeNode(None, [],", "= [] for i in xrange(dimension): comp_a1 = upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1", "ee) l, ll, e, ee = split_result 
resulting_entries_from_split = [e, ee] next_root =", "= RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry def setRootEntry(self, root_entry): self.root_entry", "= self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall,", "= 3 * 0.8 # offset = (768 * 0.2) / 2 offset", ": window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] upper_sorted_entries = entries[ :", "result == True: return result return False def delete(self, entry): # print \"hello\"", "# for n = 1000 # k = int(round((5500 * math.log(5500, 2)) **", "= node.getChildren() have_node_str = True is_root_node = node == self.getRootEntry().getChild() if is_root_node ==", "turn a non-point mbr to a point\") return mbr.getUpperLeft() def getVec(self): return self.vec", "hasConsistentNonTraditionalLeafDepthValuesHelper(self, node, depth, curr_depth): if node == None: return elif node.isLeafNode() == True:", "priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\",", "node.setEntry(entry) # entries.append(entry) \"\"\" # for entry in entries[0 : 4]: # for", "for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def doEnclosureQueryWithEarlyStopping(self,", "upper_left = self.getUpperLeft() lower_right = self.getLowerRight() if self.getDimension() == 0: raise Exception() if", "= None next_mbr = None if True: # if node.getNumChildren() == 0 and", "145x growth; expected 1040x slower) # n = 20000 # 230.0411 seconds (~538x", "priority correctly and add to priority queue curr_node = curr_entry.getChild() 
curr_mbr = curr_entry.getMBR()", "upper_left[i] comp2 = lower_right[i] side = comp2 - comp1 sides.append(side) area = reduce(lambda", "min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list = base_mbr.getMBRList() + [mbr]", "list of entries def getRectangleCloseDescendants(self, reference_entry): # repeatedly pop nodes, prune using enclosure/containment", "x, y: x * y, sides) return area @staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list", "= [x for x in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) == True] for tagged_overlapped_mbr", "node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() == None:", "= int(round((14500 * math.log(14500, 2)) ** (1 / 3.0) / denominator)) # for", "string.join(overall_str_list, \" \") + \")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict =", "heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr) == False:", "entries: # if len(tree.getNodes()) != 0: # print \"removing entry with mbr:\", entry.getMBR().toString()", "for x in candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self, entries, entry): mbr = entry.getMBR()", "overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for", "2)) ** (1 / 3.0) / denominator)) # for n = 5500 #", "combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() + x[0][1].getArea(), x[1]) for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0]", "def setParent(self, node): 
self.parent = node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() == None", "= tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list = base_mbr.getMBRList() + [mbr] mbr = CompositeMBR(upper_left_point,", "self.m def getMaximumNumEntriesPerNode(self): return self.M def isFull(self): return self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self):", "x in tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values = [x for x in tagged_enlargement_values", "self.entry = entry def isSuperNode(self): return self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode = is_supernode", "x1, y1 = upper_left x2, y2 = lower_right margin = 2 * (x2", "TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest, k): def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree =", "\"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toString(self): root =", "parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: split_result = tree.rstarSplitNode(parent, partner_entry)", "for x in pair_list] return items def getSize(self): return len(self.heap) import math def", "in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for x in combined_area_tagged_next_candidate_distributions if", "candidate_distributions = result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR()", "return len(self.heap) import math def getDistance(point1, point2): x1, y1 = point1 x2, y2", "pass # print \"no split\" return (RTree.NO_SPLIT, [node]) def 
rstarInsert(self, entry): leaf_node =", "in keep_nodes: Q.append(keep_node) # only makes sense to speak of modifying mbr if", "curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result", "= None lower_right = None \"\"\" if i % 4 == 0: upper_left", "= node.getParent() curr_entries = node.getEntries() entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x", "= [] # lower_rights = [(3, 10, 10), (1, 10, 10), (8, 10,", "= next_x1 perimeter_y = next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color)", "mbr_list = [x.getMBR() for x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x,", "\"time difference:\", time_diff, \"seconds\" # raise Exception() for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list", "that we don't necessarily need PythonMagick # note that nodes always point to", "= parent self.is_leaf = is_leaf self.m = 8 self.M = 16 self.child_to_entry_dict =", "(6, 10, 10), (9, 10, 10), (3, 10, 10), (1, 10, 10), (3,", "1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for", "base_mbr_area return area_change @staticmethod def doOverlap(mbr_a, mbr_b, without_borders = False): upper_left_a = mbr_a.getUpperLeft()", "upper_left2 and lower_right1 == lower_right2 return is_equal class RawMBR(MBR): def __init__(self, upper_left, lower_right,", "next_y + offset radius = 2 perimeter_x = next_x + offset perimeter_y =", "1)) tree = RTree() print tree.toString() curr_root = tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110,", "else str(depth) overall_str_list = [curr_depth] else: overall_str_list = [] for entry in entries:", "expected 1x slower) # n = 1000 # 1.1649 seconds (~2.72x slower for", "to conflict 
x-tree, # use as priority (prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0:", "True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix order def getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(),", "entries, image, depth + 1) class MBR: def __init__(self, upper_left, lower_right): self.upper_left =", "= self.rstarSplitNode(self.getRootEntry().getChild(), ee) l, ll, e, ee = split_result resulting_entries_from_split = [e, ee]", "add to conflict x-tree result_entry_list.append(entry) raw_mbr = mbr next_mbr = raw_mbr.clone() next_node =", "if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries,", "- overlap_area ovelap_ratio = None if union_area == 0: if mbr1.isEqualTo(mbr2) == True:", "k): def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque = deque()", "self.child_to_entry_dict = {} for curr_entry in entries: curr_child = curr_entry.getChild() (self.child_to_entry_dict)[curr_child] = curr_entry", "chosen_entry.getChild() return chosen_entry def xtreeInsert(self, entry): # print \"insert\" return self.xtreeInsertHelper(entry, self.getRootEntry().getChild()) SPLIT", "m) entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis, False) return next_result", "= self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \")", "= (entry_group1, entry_group2, axis, False) return next_result def xtreeSupernodeInsert(self, node, entries): if node.isSuperNode()", "node, entry): curr_node = node E_overall = list(set(curr_node.getEntries() + [entry])) return self.rstarSplitNodeHelper(node, E_overall,", "correctly for a leaf, # 
which is not the case when we initially", "else: parent = node.getParent() curr_entries = node.getEntries() entry = None if node.getParent() ==", "internal nodes and # best-first priority queue for leaf nodes # updated on", "resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split):", "if leaf_node == None: raise Exception(\"expected a node to be found for a", "unless we explicitly create new entries, # which we do do occasionally #", "curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree,", "= (-1 if root_mbr_is_contained == True else 1) * root_mbr_area # min-pq priority", "if self.getDimension() == 0: raise Exception() if self.getDimension() == 1: x1 = upper_left[0]", "have_resulting_second_entry_from_split == True and is_first_call_after_first_pass != True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries()", "x next_y = multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x + offset", "\"\"\" # a little stilted since we don't need a O(log(n)) time operation", "= (10, 10) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True)", "print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() if", "is_traditional_leaf_node \"\"\" def isLeafNode(self): # is_leaf_node = (self.getParent() == None and self.getNumChildren() ==", "# print \"no split\" return (RTree.NO_SPLIT, [node]) def rstarInsert(self, entry): leaf_node = 
self.rstarChooseLeaf(entry)", "node1.setParent(next_root) node2.setParent(next_root) pass return (node1, node2, entry1, entry2) @staticmethod def rstarPreadjustTree(self, leaf_node): node", "multiplier * x next_y = multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x", "node == None: return \"\" entries = node.getEntries() children = node.getChildren() have_node_str =", "getVec(self): return self.vec def getComponent(self, d): return self.getVec()[d] def getIDValue(self): return self.id_value import", "is not the case when we initially insert parent = node.getParent() entry1 =", "0: pass return (RTree.NO_SPLIT, [node]) \"\"\" follow = self.chooseSubtree(entry, node).getChild() result = self.xtreeInsertHelper(entry,", "True: if depth != curr_depth: return False else: return True else: for curr_node", "return margin surface_area = 0 for i in xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b", "pop nodes, prune using enclosure/containment # w.r.t. 
reference rectangle, add children to priority", "curr_mbr in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node = RTreeNode(None, [], True)", "for an r-tree; # takes O(n * log(n)) time at worst; # assumes", "node7 = RTreeNode(None, [], True) entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 =", "+ offset) if depth != 0: pass color_choice = depth % 3 color", "= False break return does_enclose def isEqualTo(self, mbr): upper_left1 = self.getUpperLeft() lower_right1 =", "return (RTree.SUPERNODE, [node]) elif split_status == RTree.SUPERNODE: pass # print \"no split\" return", "necessary for entry in entries: curr_node = entry.getChild() node.addEntry(entry) # needed this curr_node.setParent(node)", "point8 = (110, 100, 0) curr_mbr1 = RawMBR((100, 100, 0), (100, 100, 0),", "O(n * log(n)) time; # these times involve n, which is number of", "if node == None: return elif node.isLeafNode() == True: if depth != curr_depth:", "None if no match is found # finds one match if such a", "= 10,000 works in 1 min. 54 sec. 
for pypy with m =", "self.getUpperLeft() lower_right = self.getLowerRight() sides = [] for i in xrange(self.getDimension()): comp1 =", "100, 0) point7 = (90, 100, 0) point8 = (110, 100, 0) curr_mbr1", "def getChild(self): return self.child def setChild(self, node): self.child = node @staticmethod def draw(tree,", "= (x, y, z) # lower_right = lower_rights[i] mbr = RawMBR(upper_left, lower_right, None)", "entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() == 0: pass return (RTree.NO_SPLIT, [node]) \"\"\" follow =", "0), (100, 100, 0)) curr_mbr2 = RawMBR((50, 100, 0), (50, 100, 0), point3)", "= self.getLowerRight() if self.getDimension() == 0: raise Exception() if self.getDimension() == 1: x1", "# print \"removing entry with mbr:\", entry.getMBR().toString() # print \"tree, currently:\", tree.toString() #", "[y.getMBR() for y in x[1]]) for x in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1]))", "upper_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple) return", "None, entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2 = RTreeNode(parent, entry_collection2, None, entry2)", "on - saturation occurs # if we increase n and do not increase", "rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() # raise Exception()", "curr_entries] mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod", "surface_area += term margin = surface_area return margin def toString(self): upper_left = self.getUpperLeft()", "return partial_result def doEnclosureQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: if 
entry.getMBR().doesEnclose(mbr)", "assumes that rectangles are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for x in", "None \"\"\" if node.getParent() == None: entry = tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node)", "note that we assume rectangles are unique for close-descendant # and close-ancestor finding;", "lower_right_point = tuple(max_components) result_mbr_list = base_mbr.getMBRList() + [mbr] mbr = CompositeMBR(upper_left_point, lower_right_point, result_mbr_list)", "self.parent def getEntries(self): return (self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self): return", "* root_mbr_area # min-pq priority = (first_priority_component, second_priority_component) # priority = -1 *", "self.is_supernode = is_supernode def getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim", "# for n = 5500 # k = int(round((10000 * math.log(10000, 2)) **", "for x in entry_group1] mbr_group2 = [x.getMBR() for x in entry_group2] curr_overall_mbr1 =", "term margin = surface_area return margin def toString(self): upper_left = self.getUpperLeft() lower_right =", "return overall_str def toLeafStatusString(self): root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if", "= True if without_borders == True: do_overlap = do_overlap and comp_a1 < comp_b2", "= curr_entry.getChild() result = self.findLeafHelper(entry, curr_node) if result == None: continue else: return", "L that contains E is found # remove E from L # call", "partial_result.append(node) for curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node): node_list = []", "second_priority_component) if curr_mbr.isRaw() == True: priority = -1 * 
curr_mbr_area item = curr_entry", "self.getRootEntry().getChild().getNumChildren() == 0: return [] reference_mbr = reference_entry.getMBR() root_entry = self.getRootEntry() root_node =", "self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self, node): if node == None: return \"\" entries", "0 and False not in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) return", "the entry containing node; just look at parent of entry child if curr_entry.getMBR().isRaw()", "# image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"), \"white\") root_entry = self.getRootEntry() entries", "0) point2 = (40, 100, 0) point3 = (50, 100, 0) point4 =", "(1536 * 0.2) / 2 x1 = 0 y1 = 0 x2 =", "# n = 100 # 0.427 seconds (~1x slower for 1x growth; expected", "True) entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2, (110, 200, 100),", "reverse = True) for priority_tagged_internal_entry in priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry item =", "curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component = 0 if curr_mbr_is_contained ==", "comp_1b - comp_1a for j in xrange(i + 1, self.getDimension()): comp_2a = upper_left[j]", "+ offset next_y2 = y2 * multiplier + offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1,", "makeMBRFromPoint(point): upper_left = point lower_right = point result_mbr = RawMBR(upper_left, lower_right, point) return", "= curr_entry self.split_history_root_dimension = split_history_root_dimension self.is_supernode = is_supernode self.entry = entry def getEntry(self):", "min_comp_value = min(components) max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) 
upper_left_point = tuple(min_components) lower_right_point =", "y in x[0]], [y.getMBR() for y in x[1]]), x) for x in candidate_distributions]", "int(100 + random.randint(0, k) * 100) # x2 = int(x1 + random.random() *", "raise Exception() if self.getDimension() == 1: x1 = upper_left[0] x2 = lower_right[0] margin", "if node.getSplitHistoryRootDimension() == None: return (None, None, None, True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode()", "# a \"right\" comp_a2 = max(upper_left_a[i], lower_right_a[i]) # b \"left\" comp_b1 = min(upper_left_b[i],", "in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs] upper_margin_values = [x[0].getMarginValue()", "on 2016-11-06 to add single-start-rectangle-based # close-descendant finding that takes O(log(n)) time on", "i in xrange(entries[0].getMBR().getDimension()): low_comp_distributions, upper_comp_distributions = result[i] S_comp_value = 0 low_constituent_mbr_list_pairs = [([y.getMBR()", "# unless we explicitly create new entries, # which we do do occasionally", "domain has to grow with n # n = 100 # 0.427 seconds", "offset next_x2 = x2 * multiplier + offset next_y2 = y2 * multiplier", "entry): # print \"hello\" did_find_leaf = self.findLeaf(entry) child_node = entry.getChild() # root node", "in d_S_pairs]) min_S_value_d_S_pair_candidates = [x for x in d_S_pairs if x[1] == min_S_value]", "= root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area =", "# min-pq # priority = (first_priority_component, second_priority_component) if curr_mbr.isRaw() == True: priority =", "to grow with n # n = 100 # 0.427 seconds (~1x slower", "isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() == None and self.getNumChildren() == 0) or (self.getNumChildren() !=", "# assumes that rectangles are 
distinct # return a list of entries def", "# lower_right = lower_rights[i] mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [],", "x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions = [(MBR.findOverlapArea(x[0][0], x[0][1]), x[1]) for x in mbr_pair_tagged_candidate_distributions] overlap_values", "= [x[0].getMarginValue() + x[1].getMarginValue() for x in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value +=", "# lower-right's in (ul_i, ul_i + 10000) # two strange things going on", "find the entry containing node; just look at parent of entry child if", "be good enough to cut down branches explored; # to counter saturation, domain", "growth; expected 33x slower) # n = 5500 # 23.899 seconds (~55.96x slower", "== True: if node.isLeafNode() == True and node == self.getRootEntry().getChild(): node.addEntry(entry) curr_node =", "/ 3.0) / denominator)) # for n = 1000 # k = int(round((5500", "curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for x in entry_group1] mbr_group2 =", "entry): return self.xtreeInsert(entry) def chooseSubtree(self, entry, node): entries = node.getEntries() candidate_entries = None", "100, 0), HyperRectangle((50, 50, 0), (100, 100, 0), 1)) tree = RTree() print", "RTreeNode(None, [], True) entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7, (110,", "query for conflict x-tree returns entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry in matching_entries:", "2 perimeter_x = next_x + offset perimeter_y = next_y + offset + radius", "combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value] next_next_candidates = [x[1] for x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair", "curr_mbr = entry.getMBR() entries = self.getEntries() tagged_mbr_list = [(x.getMBR(), x) for x in", "log(n)) 
time at worst; # assumes that rectangles are distinct # return a", "a leaf where the leaf mbr # is not contained by reference rectangle;", "# root node never has a raw mbr # leaf is a non-traditional", "= [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] upper_sorted_entries =", "if node.isLeafNode() == False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list = []", "leaf mbr # is not contained by reference rectangle; # check explicitly for", "if necessary if node.isSuperNode() == True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent()", "node.getNumChildren() == 0 else [node.getEntry().getMBR().toString(), str(node)] for entry in entries: child = entry.getChild()", "change_x = x2 - x1 change_y = y2 - y1 distance = math.sqrt(change_x", "= tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries]", "2 offset = (1536 * 0.2) / 2 x1 = 0 y1 =", "is_leaf_node = (self.getParent() == None and self.getNumChildren() == 0) or (self.getNumChildren() != 0", "don't use isLeafNode() for this, as internal nodes can temporarily look like leaf", "1)] window_left_sizes = [x for x in window_left_sizes if x <= M and", "leaves; # assumes that rectangles are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for", "# otherwise the directed graph implied by the r-tree # is not acyclic", "clone(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() contained_item = self.getContainedItem() mbr = RawMBR(upper_left,", "= min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x for x in combined_area_tagged_next_candidate_distributions if x[0] == min_combined_area_value]", "in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), 
CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions =", "else 1) * curr_mbr_area # min-pq # priority = (first_priority_component, second_priority_component) if curr_mbr.isRaw()", "for 10x growth; expected 33x slower) # n = 5500 # 23.899 seconds", "math.log(20000, 2)) ** (1 / 3.0) / denominator)) # for n = 20000", "curr_node = curr_node.getChildren()[0] depth = depth + 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def", "currently:\", tree.toString() # tree2.delete(entry) pass # print tree.toString() result = tree.getRectangleCloseDescendants(entry8) print result", "x in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] overlap_value_tagged_candidate_distributions", "find E.mbr # else: # RN is an internal node # find all", "a safe path to a leaf where the leaf mbr # is not", "node2]) else: self.xtreeSupernodeInsert(node, [x.getEntry() for x in added_nodes]) # print \"supernode #1\" return", "= [x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for x in curr_entries]", "z1) # lower_right = (x2, y2, z2) upper_left = (x, y) lower_right =", "100)) # y = 10 # z = 10 # lower_right = (x,", "= self.getLowerRight() result = str(list(upper_left + lower_right) + [self.isRaw()]) return result def getDimension(self):", "[] # entry_pq.push(root_entry, priority) item = root_entry pair = (priority,item) heapq.heappush(heap,pair) # print", "for tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr curr_node = curr_entry.getChild() result =", "by the r-tree # is not acyclic and we have cliques # note", "True: curr_leaf_status = \"-\" if (node.getParent() == None or (node.getParent() != None and", "priority_tagged_internal_entries.append(priority_tagged_internal_entry) # 
item = curr_entry # pair = (priority,item) # if curr_mbr.doesEnclose(reference_mbr) ==", "\"\"\" if i % 4 == 0: upper_left = (0, 0) lower_right =", "if root_mbr_is_contained == True else 1) * root_mbr_area # min-pq priority = (first_priority_component,", "# to counter saturation, domain has to grow with n # n =", "getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries() def setParent(self, node): self.parent = node", "little stilted since we don't need a O(log(n)) time operation # to find", "0: # entry = entry_pq.pop() item = None if len(heap) != 0: (priority,item)", "None) node1 = RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None) node2", "have_node_str = True overall_str_list = None if have_node_str == True: curr_depth = \"-\"", "* 100) x = random.randint(0, 10000) y = random.randint(0, 10000) # upper_left =", "margin if self.getDimension() == 2: x1, y1 = upper_left x2, y2 = lower_right", "look at parent of entry child if curr_entry.getMBR().isRaw() == True: if entry ==", "False): upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight()", "added_node in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status == RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren()", "== mbr.getUpperLeft() lower_right_matches = self.getLowerRight() == mbr.getLowerRight() result = upper_left_matches == True and", "window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple)", "axis = node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result", "in entry_collection2: 
curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() == node: next_root_entry = RTreeEntry(next_mbr, None)", "3: upper_left = (100, 100) lower_right = (120, 120) \"\"\" denominator = (100", "M = 16 # n = 6,000 works in 56.672 sec. for pypy", "x) for x in entries] tagged_overlapped_mbr_list = [x for x in tagged_mbr_list if", "conflict_x_tree.doContainmentQuery(mbr) for matching_entry in matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry) # if node is", "multiplier * y2 + offset) if depth != 0: pass color_choice = depth", "removeEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self): return", "next_y1 = (multiplier * x1 + offset, multiplier * y1 + offset) next_x2,", "def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() == 0 return is_traditional_leaf_node \"\"\" def isLeafNode(self): #", "# raise Exception() for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start", "node.isSuperNode() == True and node.getNumChildren() <= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() == None: #", "depth) @staticmethod def drawHelper(tree, entry, image, depth): node = entry.getChild() entries = node.getEntries()", "# not tested # returns entries # does intersection query def doOverlapQuery(self, mbr,", "RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1) entry2 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection2), None)", "return tree.adjustTree(tree, l, [e, ee], True, False) else: return (False, []) \"\"\" #", "priority_tagged_internal_entries = [] for curr_entry in entries: # set 
priority correctly and add", "parent.removeEntry(entry) if (parent.getNumChildren() + 2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent) return tree.adjustTree(tree,", "= int(round((1000 * math.log(1000, 2)) ** (1 / 3.0) / denominator)) # for", "@staticmethod def getEnlargedMBR(base_mbr, mbr): mbr_list = [base_mbr, mbr] upper_left_points = [x.getUpperLeft() for x", "node.getMinimumNumEntriesPerNode() or len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False, None, None, dimension) else: return (True,", "parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception() if node.getNumChildren() <= 1: #", "= (40, 100, 0) point3 = (50, 100, 0) point4 = (60, 100,", "PythonMagick import heapq from collections import deque # min-pq class PriorityQueue: def __init__(self):", "print tree.toString() result = tree.getRectangleCloseDescendants(entry8) print result result = tree.getAllRectangleCloseAncestors() print result print", "entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2: curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() == node:", "+ offset center_y = next_y + offset radius = 2 perimeter_x = next_x", "< node.getMinimumNumEntriesPerNode() or len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False, None, None, dimension) else: return", "= None if union_area == 0: if mbr1.isEqualTo(mbr2) == True: overlap_ratio = 1", "node.getParent() curr_entries = node.getEntries() entry = None if node.getParent() == None: entry =", "# this is idempotent for added_node in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status ==", "# im = Image.new(\"RGB\", (512, 512), \"white\") 
\"\"\" im = Image.new(\"RGB\", (768, 768),", "image.write(\"tree.png\") def main(): point1 = (30, 100, 0) point2 = (40, 100, 0)", "and node == self.getRootEntry().getChild(): node.addEntry(entry) curr_node = entry.getChild() curr_node.setParent(node) mbr = CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr)", "return area_change @staticmethod def doOverlap(mbr_a, mbr_b, without_borders = False): upper_left_a = mbr_a.getUpperLeft() lower_right_a", "True: if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in", "# updated on 2016-08-25 to fix overlap logic for determining when to attempt", "def getLowerRight(self): return self.lower_right def getIDValue(self): return self.id_value class Point: def __init__(self, vec,", "False not in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) is_leaf_node = self.getNumChildren()", "in entry_collection2] # this line presumes that we have parent set correctly for", "NO_SPLIT = 2 def xtreeInsertHelper(self, entry, node): split_status = None next_mbr = None", "CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if (parent.getNumChildren() + 2) <= parent.getMaximumNumEntriesPerNode():", "presumes that we have parent set correctly for a leaf, # which is", "continue if node.isLeafNode() == True: # could have a safe path to a", "that rectangles are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for x in self.getNodes()", "mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap = True # assume that", "dimension = mbr_a.getDimension() sides = [] for i in xrange(dimension): comp_a1 = upper_left_a[i]", "entry): curr_node = node E_overall = list(set(curr_node.getEntries() + [entry])) return self.rstarSplitNodeHelper(node, E_overall, entry)", 
"x in entries] entry.draw(tree, entries, image, depth + 1) class MBR: def __init__(self,", "image, depth): node = entry.getChild() entries = node.getEntries() mbr_list = [entry.getMBR()] for mbr", "\"supernodes:\", [x for x in tree.getNodes() if x.isSuperNode() == True], tree.getRootEntry().getChild() # tree2.draw()", "[] self.getNodesHelper(node, node_list) return node_list \"\"\" def getUnionArea(self): pass \"\"\" # takes O(log(n))", "709, 871), (1390, 1402, 1548)), \\ ((433, 499, 483), (1300, 1330, 1055))] \"\"\"", "entry in result; # if we made it this far, we should add", "if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix order def getNodes(self):", "* 0.8 # offset = (768 * 0.2) / 2 offset = (1536", "# 23.899 seconds (~55.96x slower for 55x growth; expected 317x slower) # n", "entries def doEnclosureQuery(self, mbr): partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def", "entries = node.getEntries() candidate_entries = None # if node.isLeafNode() == True: candidate_entries =", "== False: # print \"not underfull\" parent = node.getParent() curr_entries = node.getEntries() entry", "takes O(n * log(n)) time, # where n is number of actual rectangles", "create new entries, # which we do do occasionally # note that M", "base_mbr = curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(), x[1])", "node @staticmethod def draw(tree, entries, image, depth): for entry in entries: RTreeEntry.drawHelper(tree, entry,", "entry) chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry):", "math.log(1000, 2)) ** (1 / 3.0) / denominator)) # for n = 1000", "in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] keep_nodes = [x for x in 
self.getNodesForNode(node)", "upper_left[j] comp_2b = lower_right[j] term2 = comp_2b - comp_2a term = 2 *", "= self.getLowerRight() contained_item = self.getContainedItem() mbr = RawMBR(upper_left, lower_right, contained_item) return mbr def", "== True: self.doContainmentQueryHelper(mbr, curr_entry, partial_result) # prefix order def getNodes(self): node_list = []", "is not contained by reference rectangle; # check explicitly for this case if", "if mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr) == False: # ignore node if associated", "a node, which can be None if no match is found # finds", "self.parent = parent self.is_leaf = is_leaf self.m = 8 self.M = 16 self.child_to_entry_dict", "for i in xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ : ] low_sorted_entries.sort(key = lambda x:", "low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] : ])", "self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose = left_value1 <= left_value2 and right_value1 >= right_value2", "this parent-setting step is crucial # if node.isNonTraditionalLeafNode() == False: # this is", "(~398x slower for 145x growth; expected 1040x slower) # n = 20000 #", "True, False) else: return (False, []) \"\"\" # assume item is in tree", "internal node # find all entries of RN that cover E.mbr # follow", "+ 2 * (y2 - y1) return margin surface_area = 0 for i", "x in self.getNodes() if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries = [x.getEntry() for x in", "curr_leaf_status = \"-\" if node.isLeafNode() == False else \"+\" overall_str_list = [curr_leaf_status] else:", "entry child if curr_entry.getMBR().isRaw() == True: if entry == curr_entry: return True else:", "0 else str(depth) overall_str_list = [curr_depth] else: overall_str_list = [] for entry in", 
"CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry)", "# 170.053 seconds (~398x slower for 145x growth; expected 1040x slower) # n", "start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def draw(self): # im = Image.new(\"RGB\", (512, 512), \"white\") \"\"\"", "for i in xrange(10): # for i in xrange(4): \"\"\" ul_lr_pairs = [((797,", "= RTreeNode(None, [], True) entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5,", "rectangles are unique for close-descendant # and close-ancestor finding; the assumption is necessary", "= 8 and M = 16 # these numbers are for upper-left's in", "chosen_child) def rstarChooseLeaf(self, entry): return self.rstarChooseLeafHelper(entry, self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node): if node.isLeafNode()", "(110, 200, 100), None) print tree.doContainmentQuery(curr_mbr3) # raise Exception() print tree.doOverlapQuery(curr_mbr2) # raise", "= [e, ee] next_root = RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else:", "# item = curr_entry # internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component) priority_tagged_internal_entry = (priority,", "in entry_group1] mbr_group2 = [x.getMBR() for x in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2", "entries: base_mbr = curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(),", "node.isLeafNode() == True: # split just in case # print \"split\" return (RTree.SPLIT,", "def toDepthString(self): root = self.getRootEntry().getChild() 
return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node, depth): if", "int(round((20000 * math.log(20000, 2)) ** (1 / 3.0) / denominator)) # for n", "curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2: curr_entry.getChild().setParent(node2) node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild()", "curr_entries] mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return", "and modify adjustTree(); # stop at root instead of non-existent parent of root;", "(110, 200, 100), point6) node6 = RTreeNode(None, [], True) entry6 = RTreeEntry(mbr6, node6)", "in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result ==", "<= node.getMaximumNumEntriesPerNode(): node.setToSuperNode(False) if node.getParent() == None: # we are a root node", "\"\"\" if node.getParent() == None: entry = tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node) \"\"\"", "entry.getMBR().toString() # print \"tree, currently:\", tree.toString() # tree2.delete(entry) pass # print tree.toString() result", "CompositeMBR(upper_left_point, lower_right_point, component_mbr_list) return result_mbr class HyperRectangle: def __init__(self, upper_left, lower_right, id_value): self.upper_left", "== 0 else [node.getEntry().getMBR().toString(), str(node)] for entry in entries: child = entry.getChild() child_str", "return len(self.heap) == 0 def peek(self): heap = self.heap pair = heap[0] result", "None and self.getNumChildren() == 0) or (self.getNumChildren() != 0 and False not in", "= 1,000 works in 2.996 sec. 
for pypy with m = 2 and", "rectangle taken from set of actual rectangles # for an r-tree and O(n", "image.strokeWidth(4) multiplier = 3 * 0.8 # offset = (768 * 0.2) /", "curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) #", "stack for internal nodes and # best-first priority queue for leaf nodes #", "= node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M, m)", "= self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis = node.getSplitHistoryRootDimension() result =", "if x.isSuperNode() == True], tree.getRootEntry().getChild() # tree2.draw() print len(tree2.getNodes()) import time time1 =", "getMarginValue(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() if self.getDimension() == 0: raise Exception()", "# which we do do occasionally # note that M of two works", "for entry in entries: RTreeEntry.drawHelper(tree, entry, image, depth) @staticmethod def drawHelper(tree, entry, image,", "distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for x in self.getNodes() if x.getEntry().getMBR().isRaw() ==", "chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child) def rstarChooseLeaf(self, entry): return", "lower_right multiplier = 1 / (1.0 * 6.5) * 0.8 offset = (1536", "100) # y1 = int(100 + random.randint(0, k) * 100) # z1 =", "do_overlap = do_overlap and comp_a1 < comp_b2 and comp_a2 > comp_b1 else: do_overlap", "entry_collection4, dimension) else: return (True, entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self, node, entry): m", "else: return True else: for curr_node in node.getChildren(): result = 
self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth", "margin surface_area = 0 for i in xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b =", "upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension = mbr_a.getDimension() sides = [] for", "None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry = self.getRootEntry()", "[], True) root_mbr = CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry)", "None, True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis", "entry_pq.pop() item = None if len(heap) != 0: (priority,item) = heapq.heappop(heap) elif len(internal_node_stack_deque)", "mbr] upper_left_points = [x.getUpperLeft() for x in mbr_list] lower_right_points = [x.getLowerRight() for x", "children = node.getChildren() have_node_str = True is_root_node = node == self.getRootEntry().getChild() if is_root_node", "result_list = [] if len(entries) > (M + 1): raise Exception() window_left_sizes =", "!= True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild() for x", "reference rectangle; # check explicitly for this case if reference_mbr.doesEnclose(mbr) == False: continue", "!= 0 and False not in [x.getChild().getNumEntries() == 0 for x in self.getEntries()])", "mbr7 = RawMBR(point7, (110, 200, 100), point7) node7 = RTreeNode(None, [], True) entry7", "area for i in xrange(mbr_a.getDimension()): # a \"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i]) #", "for entry in entries: curr_node = entry.getChild() node.addEntry(entry) # needed this curr_node.setParent(node) \"\"\"", "SUPERNODE = 1 NO_SPLIT = 2 def xtreeInsertHelper(self, entry, node): 
split_status = None", "(1.0 * union_area) # raise Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise Exception()", "0 x2 = 47 y2 = 60 next_x1 = x1 * multiplier +", "internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component) priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item =", "axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2", "= entry.getChild() child_str = self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" +", "= node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() == None and self.getNumChildren() == 0)", "= RawMBR(point7, (110, 200, 100), point7) node7 = RTreeNode(None, [], True) entry7 =", "set as new root its only child pass def condenseTree(self, leaf_node): Q =", "entries: curr_node = entry.getChild() node.addEntry(entry) # needed this curr_node.setParent(node) \"\"\" entries = node.getEntries()", "for x in window_size_pairs if x[0] <= M and x[0] >= m and", "1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry)", "1 second_priority_component = (-1 if root_mbr_is_contained == True else 1) * root_mbr_area #", "mbr in mbr_list: upper_left = mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1, y1 = upper_left", "# remove E from L # call algorithm condenseTree(L) # if the root", "lower_right_b = mbr_b.getLowerRight() dimension = mbr_a.getDimension() sides = [] for i in xrange(dimension):", "# add actual rectangles to conflict x-tree, # use as priority (prefer_contained, prefer_large_area_if_contained_else_small)", "curr_node = entry.getChild() curr_node.setParent(node) mbr = 
CompositeMBR.makeMBR([entry.getMBR()]) node.getEntry().setMBR(mbr) # print \"no split\" return", "= self.rstarSplitNode(leaf_node, entry) l, ll, e, ee = split_result adjust_result = RTree.rstarAdjustTree(self, l,", "True if without_borders == True: do_overlap = do_overlap and comp_a1 < comp_b2 and", "return len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries() def setParent(self, node): self.parent = node def", "for i in xrange(component_mbr_list[0].getDimension()): components = [x[i] for x in points] min_comp_value =", "= [x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() ==", "None if have_node_str == True: curr_leaf_status = \"-\" if (node.getParent() == None or", "node6 = RTreeNode(None, [], True) entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 =", "= overlap_area / (1.0 * union_area) # raise Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO:", "demotion when size decreases to or below M # updated on 2016-11-06 to", "start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry", "doContainmentQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: # print mbr.toString(), entry.getMBR().toString() if", "True else 1 second_priority_component = (-1 if root_mbr_is_contained == True else 1) *", "comp1 sides.append(side) area = reduce(lambda x, y: x * y, sides) return area", "exists in conflict x-tree continue if entry == ignore_entry: # ignore node if", "# for an r-tree and O(n * log(n)) time at worst; # and", "# raise Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4, dimension, do_fail = result2", "entry-aware nodes; made bug fix for 
adjustTree(); # fixed bug with parent pointers", "10), (3, 10, 10), (1, 10, 10), (3, 10, 10)] # for i", "RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) for i in xrange(1000):", "entry def removeEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m def", "push(self, item, priority): pair = (priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item) = heapq.heappop(self.heap) return", "= [(upper_sorted_entries[ : window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] curr_tuple =", "mbr # leaf is a non-traditional leaf leaf_node = child_node.getParent() if entry !=", "partial_result) # prefix order def getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list", "in d_S_pairs if x[1] == min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return", "mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose = left_value1 <= left_value2 and", "next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr,", "def toLeafStatusString(self): root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if node ==", "= comp_1b - comp_1a for j in xrange(i + 1, self.getDimension()): comp_2a =", "= area1 + area2 - overlap_area ovelap_ratio = None if union_area == 0:", "= int(100 + random.randint(0, k) * 100) # x2 = int(x1 + random.random()", "parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list = [x.getMBR() for x", "result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw() == True: 
if entry.getMBR().doesEnclose(mbr) == True:", "1330, 1055))] \"\"\" # n = 10,000 works in 1 min. 54 sec.", "axis, M, m): result = RTree.rstarGenDistributions(entries, M, m) candidate_distributions = None candidate_distributions =", "10, 10), (3, 10, 10)] # for i in xrange(10): # for i", ": ]) for j in xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple) return result_list", "getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim def getParent(self): return self.parent", "self.contained_item def getMBRList(self): return [self] def clone(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight()", "id_value @staticmethod def toPoint(mbr): if mbr.getUpperLeft() != mbr.getLowerRight(): raise Exception(\"attempted to turn a", "random.randint(0, k) * 100) # z1 = int(100 + random.randint(0, k) * 100)", "= 1,000 works in 3.428 sec. 
for pypy with m = 8 and", "# note that we don't necessarily need PythonMagick # note that nodes always", "isUnderfull(self): return self.getNumEntries() < self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def toString(self): return", "raise Exception(\"expected a node to be found for a delete\") # if parent", "grow with n # n = 100 # 0.427 seconds (~1x slower for", "nodes; made bug fix for adjustTree(); # fixed bug with parent pointers for", "self.getDimension() == 0: raise Exception() if self.getDimension() == 1: x1 = upper_left[0] x2", "= chosen_d_S_pair[0] return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis, M, m): result = RTree.rstarGenDistributions(entries,", "0: return [] reference_mbr = reference_entry.getMBR() root_entry = self.getRootEntry() root_node = root_entry.getChild() root_mbr", "= next_x1 center_y = next_y1 radius = 4 perimeter_x = next_x1 perimeter_y =", "rstarChooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node == self.getRootEntry().getChild(): return node", "overall_str_list = [] for entry in entries: child = entry.getChild() child_str = self.toDepthStringHelper(child,", "= RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry = self.getRootEntry() curr_entries = entry.getChild().getEntries()", "\"\"\" def rstarSplitNode(self, node, entry): curr_node = node E_overall = list(set(curr_node.getEntries() + [entry]))", "to priority queue, # ignore if contained rectangle is contained by a rectangle", "= node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode() for this, as internal nodes can", "= \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def toDepthString(self): root", "in node.getEntries()] curr_x_tree = RTree() overlap_area_sum = sum([x.getArea() for x in mbr_list]) for", "print \"underfull\" parent = 
node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode() for this, as", "= 10 # lower_right = (x, y, z) # lower_right = lower_rights[i] mbr", "comp_1b = lower_right[i] term1 = comp_1b - comp_1a for j in xrange(i +", "+ \")\" return overall_str def toDepthString(self): root = self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0) def", "a rectangle in conflict x-tree, # add actual rectangles to conflict x-tree, #", "x) >= m] window_size_pairs = [(window_left_sizes[i], len(entries) - window_left_sizes[i]) for i in range(len(window_left_sizes))]", "= self.getLowerRight() sides = [] for i in xrange(self.getDimension()): comp1 = upper_left[i] comp2", "3.0) / denominator)) # for n = 20000 # k = int(round((14500 *", "== True: if (parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(),", "\"\"\" # takes O(log(n)) time on average for start rectangle # taken from", "20000 # k = int(round((14500 * math.log(14500, 2)) ** (1 / 3.0) /", "in window_left_sizes if x <= M and x >= m and (len(entries) -", "871), (1390, 1402, 1548)), \\ ((433, 499, 483), (1300, 1330, 1055))] \"\"\" #", "and # lower-right's in (ul_i, ul_i + 10000) # two strange things going", "= [(([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]), x) for", "for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) self.condenseTreeHelper(node.getParent(), Q) return # not", "setEntry(self, entry): self.entry = entry def isSuperNode(self): return self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode", "overlap_value_tagged_candidate_distributions if x[0] == min_overlap_value] next_next_candidates = [x[1] for x in matching_overlap_value_tagged_candidate_distributions] if", "RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 = 
RawMBR(point8, (110, 200, 100), point8) node8 =", "= entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x) for x in mbr_list] enlargement_values =", "toString(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() result = str(list(upper_left + lower_right) +", "in xrange(10): # for i in xrange(4): \"\"\" ul_lr_pairs = [((797, 989, 602),", "else [node.getEntry().getMBR().toString(), str(node)] for entry in entries: child = entry.getChild() child_str = self.toStringHelper(child)", "= result1 mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR() for", "\") + \")\" return overall_str def toEntriesArePresentString(self): root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def", "\"tree:\", self.toString() self.insert(curr_entry) def condenseTreeHelper(self, node, Q): # demote super-node if necessary if", "= [x.getMBR() for x in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node,", "and False not in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) is_leaf_node =", "== True] start_rectangle_entries = [x.getEntry() for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for", "curr_mbr1 = RawMBR((100, 100, 0), (100, 100, 0), (100, 100, 0)) curr_mbr2 =", "node exists # def delete(self, E, RN): def findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry())", "entry in entries[0 : 4]: # print \"supernodes:\", [x for x in tree.getNodes()", "# updated on 2016-11-06 to add single-start-rectangle-based # close-descendant finding that takes O(log(n))", "vec, id_value): self.vec = vec self.id_value = id_value @staticmethod def toPoint(mbr): if mbr.getUpperLeft()", "= result next_result = (entry_group1, entry_group2, axis) return next_result def xtreeOverlapMinimalSplit(self, node, entry):", "= {} for start_rectangle_entry in start_rectangle_entries: 
start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry in start_rectangle_entries:", "== None: entry = tree.getRootEntry() else: entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry = parent.retrieveEntryForChild(node)", "overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\" return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self,", "nodes # updated on 2016-11-16 to fix margin calculation # note that we", "node.isLeafNode() == True: # could have a safe path to a leaf where", "n = 1,000 works in 3.428 sec. for pypy with m = 8", "to conflict x-tree result_entry_list.append(entry) raw_mbr = mbr next_mbr = raw_mbr.clone() next_node = RTreeNode(None,", "also, we implement delete(); note that our tree # has entry-aware nodes; made", "def hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node = root depth = 0 while curr_node.isLeafNode()", "implement delete(); note that our tree # has entry-aware nodes; made bug fix", "[x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else:", "[mbr] mbr = CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return mbr @staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area", "component_mbr_list] points = upper_left_points + lower_right_points min_components = [] max_components = [] for", "1) if result == False: return False return True def toNumChildrenString(self): root =", "\"removing entry with mbr:\", entry.getMBR().toString() # print \"tree, currently:\", tree.toString() # tree2.delete(entry) pass", "if result == True: return True return False # returns entries def doContainmentQuery(self,", "n = 5500 # k = int(round((10000 * math.log(10000, 2)) ** (1 /", "True: curr_leaf_status = \"-\" if node.isLeafNode() == False else \"+\" 
overall_str_list = [curr_leaf_status]", "# could have a safe path to a leaf where the leaf mbr", "result def getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension = self.getDimension() does_enclose =", "expected 1040x slower) # n = 20000 # 230.0411 seconds (~538x slower for", "x2, y2 = point2 change_x = x2 - x1 change_y = y2 -", "Exception() if node.getNumChildren() <= 1: # raise Exception() node.setToSuperNode(False) elif node.getNumChildren() <= node.getMaximumNumEntriesPerNode():", "[x.getMBR() for x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x)", "TopicKNearest, k): def getRectangleCloseDescendantsHelper(self, heap, reference_mbr, result_entry_list, ignore_entry): conflict_x_tree = RTree() internal_node_stack_deque =", "comp_a1, comp_a2, comp_b1, comp_b2 # do_overlap = True if without_borders == True: do_overlap", "leaf nodes # updated on 2016-11-16 to fix margin calculation # note that", "self.getRootEntry().getChild()) def rstarChooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node == self.getRootEntry().getChild():", "x = int(random.randint(1, 100)) # y = 10 # z = 10 #", "len(internal_node_stack_deque) != 0: item = internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap) entry = item", "item def isEmpty(self): return len(self.heap) == 0 def peek(self): heap = self.heap pair", "tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr curr_node = curr_entry.getChild() result = self.findLeafHelper(entry, curr_node) if", "None if node.getParent() == None: entry = tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node) children", "2)) ** (1 / 3.0) / denominator)) # for n = 20000 #", "if node.getNumEntries() != 0 else str(depth) overall_str_list = [curr_depth] else: overall_str_list = []", "= RawMBR(point5, (110, 200, 100), point5) node5 = RTreeNode(None, [], True) entry5 =", "10,000 works in 1 min. 54 sec. 
for pypy with m = 2", "child_node = entry.getChild() # root node never has a raw mbr # leaf", "overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry in entries: child =", "0 else: overlap_ratio = overlap_area / (1.0 * union_area) # raise Exception() if", "= next_y + offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children = [x.getChild()", "conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False: # if we made it this far, we", "def doOverlapQuery(self, mbr, without_borders = False): partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders)", "def pop(self): (priority,item) = heapq.heappop(self.heap) return item def isEmpty(self): return len(self.heap) == 0", "== False: if curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr) == False: continue # item", "S_comp_value += upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs = S_comp_dict.items() min_S_value = min([x[1] for", "max(upper_left_b[i], lower_right_b[i]) # print comp_a1, comp_a2, comp_b1, comp_b2 # do_overlap = True if", "for j in xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod def", "= 0 if curr_mbr_is_contained == True else 1 second_priority_component = (-1 if curr_mbr_is_contained", "tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) == True] for tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry =", "= lower_rights[i] mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry", "node4 = RTreeNode(None, [], True) entry4 = RTreeEntry(mbr4, node4) node4.setEntry(entry4) tree.insert(entry4) mbr5 =", "M = 4 # n = 1,000 works in 3.428 sec. 
for pypy", "for pypy with m = 8 and M = 16 # n =", "internal_node_stack_deque = deque() # while len(heap) != 0: while len(internal_node_stack_deque) != 0 or", "for x in points] min_comp_value = min(components) max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point", "lower_right = (10, 10) elif i % 4 == 1: upper_left = (20,", "point7) node7 = RTreeNode(None, [], True) entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8", "x in component_mbr_list] points = upper_left_points + lower_right_points min_components = [] max_components =", "= CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea() area2", "the root has only one child (and it is not a leaf) #", "keeping the node if node.isUnderfull() == False: # print \"not underfull\" parent =", "None \"\"\" # a little stilted since we don't need a O(log(n)) time", "return chosen_d_value @staticmethod def rstarChooseSplitIndex(entries, axis, M, m): result = RTree.rstarGenDistributions(entries, M, m)", "mbr continue if conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore node if enclosing mbr exists", "@staticmethod def rstarGenDistributions(entries, M, m): result_list = [] if len(entries) > (M +", "order def getNodes(self): node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def getNodesHelper(self, node,", "# k = int(round((10000 * math.log(10000, 2)) ** (1 / 3.0) / denominator))", "getEntry(self): return self.entry def setEntry(self, entry): self.entry = entry def isSuperNode(self): return self.is_supernode", "100, 0) point3 = (50, 100, 0) point4 = (60, 100, 0) point5", "None if union_area == 0: if mbr1.isEqualTo(mbr2) == True: overlap_ratio = 1 else:", "partner_children = [x.getChild() for x in partner_entries] partner_mbr_list = [x.getMBR() for x in", "underfull\" 
parent = node.getParent() curr_entries = node.getEntries() entry = parent.retrieveEntryForChild(node) children = [x.getChild()", "if x[0] == min_enlargement_value] candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries", "y in x[1]]) for x in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x", "= entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self): return self.M def isFull(self):", "x) for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in", "= 14500 # 170.053 seconds (~398x slower for 145x growth; expected 1040x slower)", "necessarily need PythonMagick # note that nodes always point to same entries #", "comp_b1)) sides.append(side) intersection_volume = reduce(lambda x, y: x * y, sides) return intersection_volume", "self.getRootEntry().getChild(): # if node.getNumChildren() == 0: # if node.isNonTraditionalLeafNode() == True: if node.isLeafNode()", "priority_tagged_internal_entries.sort(key = lambda x: x[0], reverse = True) for priority_tagged_internal_entry in priority_tagged_internal_entries: priority,", "xrange(dimension): comp_a1 = upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1 = upper_left_b[i] comp_b2 = lower_right_b[i]", "+ x[1].getMarginValue() for x in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs", "# finds one match if such a node exists # def delete(self, E,", "10) elif i % 4 == 1: upper_left = (20, 20) lower_right =", "is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() == 0 return is_traditional_leaf_node \"\"\" def", "= True overall_str_list = None if have_node_str == True: 
curr_leaf_status = \"-\" if", "node.getSplitHistoryRootDimension() == None: return (None, None, None, True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M", "result_mbr class HyperRectangle: def __init__(self, upper_left, lower_right, id_value): self.upper_left = upper_left self.lower_right =", "def xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension() == None: return (None, None, None, True)", "mbr_list = [base_mbr, mbr] upper_left_points = [x.getUpperLeft() for x in mbr_list] lower_right_points =", "x2 = int(x1 + random.random() * 100) # y2 = int(y1 + random.random()", "made bug fix for adjustTree(); # fixed bug with parent pointers for xtreeInsert();", "entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 = RawMBR(point8, (110, 200, 100), point8)", "node, resulting_entries_from_split, have_resulting_second_entry_from_split, is_first_call_after_first_pass): if node == None: return (False, []) else: parent", "100) x = random.randint(0, 10000) y = random.randint(0, 10000) # upper_left = (x1,", "image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x + offset center_y = next_y + offset radius", "multiplier + offset next_y1 = y1 * multiplier + offset next_x2 = x2", "return (self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self): return", "node, E_overall, entry): # prev_leaf_status = node.isLeafNode() prev_leaf_status = None curr_node = node", "when we initially insert parent = node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 =", "lthe leaf L that contains E is found # remove E from L", "x in mbr_list] lower_right_points = [x.getLowerRight() for x in mbr_list] points = upper_left_points", "have a safe path to a leaf where the leaf mbr # is", "!= None and node in node.getParent().getChildren())) == 
False else \"+\" overall_str_list = [curr_leaf_status]", "in matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry) # if node is a leaf node,", "entry_group2 = result next_result = (entry_group1, entry_group2, axis, False) return next_result def xtreeSupernodeInsert(self,", "entry in entries: child = entry.getChild() child_str = self.toDepthStringHelper(child, depth + 1) curr_str", "x2 = 47 y2 = 60 next_x1 = x1 * multiplier + offset", "union_area = area1 + area2 - overlap_area ovelap_ratio = None if union_area ==", "RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry) \"\"\" # for", "class Point: def __init__(self, vec, id_value): self.vec = vec self.id_value = id_value @staticmethod", "# raise Exception() if overlap_ratio > RTree.MAX_OVERLAP_RATIO: # raise Exception() result2 = self.xtreeOverlapMinimalSplit(node,", "have_node_str = True is_root_node = node == self.getRootEntry().getChild() if is_root_node == True: have_node_str", "lower_rights = [(3, 10, 10), (1, 10, 10), (8, 10, 10), (6, 10,", "split_result = self.xtreeSplitNode(node, entry) was_successful, entry_collection1, entry_collection2, dimension = split_result if was_successful ==", "for x in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for x in overlap_value_tagged_candidate_distributions] min_overlap_value =", "for x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions =", "import random entries = [] # lower_rights = [(3, 10, 10), (1, 10,", "time on average # for start rectangle taken from set of actual rectangles", "# raise Exception() conflict_x_tree.delete(matching_entry) # if node is a leaf node, it has", "= 1000 # k = int(round((5500 * math.log(5500, 2)) ** (1 / 3.0)", "min_area] candidate_entries = [x[1] for x in candidate_tagged_area_values] 
return candidate_entries @staticmethod def rstarGenDistributions(entries,", "l, [e, ee], True, False) else: return (False, []) \"\"\" # assume item", "# n = 2000 # n = 1000 # n = 20000 n", "\"\"\" if node.isLeafNode() == False: curr_mbr = entry.getMBR() entries = self.getEntries() tagged_mbr_list =", "node_list = [] self.getNodesHelper(node, node_list) return node_list \"\"\" def getUnionArea(self): pass \"\"\" #", "class RawMBR(MBR): def __init__(self, upper_left, lower_right, contained_item): MBR.__init__(self, upper_left, lower_right) self.contained_item = contained_item", "for x in entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x) for", "= heapq.heappop(self.heap) return item def isEmpty(self): return len(self.heap) == 0 def peek(self): heap", "if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr, curr_entry, partial_result, without_borders) # returns entries def", "- x1 change_y = y2 - y1 distance = math.sqrt(change_x ** 2 +", "None, None, dimension) else: return (True, entry_collection3, entry_collection4, dimension) else: return (True, entry_collection1,", "result = self.findLeafHelper(entry, next_entry) if result == True: return result return False def", "which can be None if no match is found # finds one match", "upper_left, lower_right): self.upper_left = upper_left self.lower_right = lower_right def isRaw(self): return False def", "x[1]]) for x in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in upper_constituent_mbr_list_pairs]", "= entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def removeEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def", "= self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.chooseLeafHelper(entry, chosen_child) def", "idempotent for added_node in 
added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status == RTree.SPLIT: # added_node.setParent(node)", "# priority = -1 * root_mbr_area # entry_pq = PriorityQueue() heap = []", "color = PythonMagick.Color(0, 0, 65535, 32767) elif color_choice == 2: color = PythonMagick.Color(0,", "entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2 = RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent !=", "100), point2) node2 = RTreeNode(None, [], True) entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2)", "for x in curr_entries] mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr =", "node): if node.isLeafNode() == True: if node == self.getRootEntry().getChild(): return node else: return", "entries = entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doOverlapQueryHelper(mbr,", "overlap_ratio = 0 else: overlap_ratio = overlap_area / (1.0 * union_area) # raise", "entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2 = RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2)", "(self.child_to_entry_dict)[curr_child] = curr_entry self.split_history_root_dimension = split_history_root_dimension self.is_supernode = is_supernode self.entry = entry def", "[e, ee] next_root = RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass", "comp_2b = lower_right[j] term2 = comp_2b - comp_2a term = 2 * term1", "(40, 40) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry", "return self.id_value import string class RTree: def __init__(self): root_node = RTreeNode(None, [], True)", "True) entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem here tree.insert(entry8) 
print tree.toString() print", "i in xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2", "print tree.doContainmentQuery(curr_mbr3) # raise Exception() print tree.doOverlapQuery(curr_mbr2) # raise Exception() print tree.toString() #", "raise Exception() print tree.doOverlapQuery(curr_mbr2) # raise Exception() print tree.toString() # tree.delete(entry1) print tree.toString()", "= mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap = True # assume", "x[0], reverse = True) for priority_tagged_internal_entry in priority_tagged_internal_entries: priority, internal_entry = priority_tagged_internal_entry item", "image, depth) @staticmethod def drawHelper(tree, entry, image, depth): node = entry.getChild() entries =", "we assume rectangles are unique for close-descendant # and close-ancestor finding; the assumption", "x in d_S_pairs]) min_S_value_d_S_pair_candidates = [x for x in d_S_pairs if x[1] ==", "to be good enough to cut down branches explored; # to counter saturation,", "return margin if self.getDimension() == 2: x1, y1 = upper_left x2, y2 =", "mbr): self.mbr = mbr def getChild(self): return self.child def setChild(self, node): self.child =", "lower_right, contained_item): MBR.__init__(self, upper_left, lower_right) self.contained_item = contained_item def isRaw(self): return True @staticmethod", "of modifying mbr if we plan on keeping the node if node.isUnderfull() ==", "result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result == True: return True return False #", "True) else: m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() E_overall = node.getEntries() axis =", "curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2: curr_entry.getChild().setParent(node2) 
node1.setSplitHistoryRootDimension(dimension) node2.setSplitHistoryRootDimension(dimension) if self.getRootEntry().getChild() == node: next_root_entry", "tree.insert(entry1) mbr2 = RawMBR(point2, (110, 200, 100), point2) node2 = RTreeNode(None, [], True)", "for x in mbr_pair_tagged_next_candidate_distributions] combined_area_values = [x[0] for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value =", "= [] for entry in entries: child = entry.getChild() child_str = self.toDepthStringHelper(child, depth", "point\" \"\"\" if node.isSuperNode() == True: # print \"supernode encountered\" parent = node.getParent()", "= do_overlap and comp_a1 <= comp_b2 and comp_a2 >= comp_b1 if do_overlap ==", "self.id_value = id_value def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getIDValue(self):", "tagged_overlapped_mbr_list = [x for x in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) == True] for", "80) elif i % 4 == 3: upper_left = (100, 100) lower_right =", "parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use isLeafNode() for this, as internal nodes can temporarily look", "False: # if we made it this far, we should add children to", "insert using order of high-level to low-level # Q = list(set(Q)) Q.reverse() for", "1054, 307), (1776, 1597, 501)), \\ ((803, 233, 521), (1314, 717, 1487)), \\", "False else: return True else: for curr_node in node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth,", "node.getParent() else: entries = node.getEntries() candidate_entries = None # if node.isLeafNode() == True:", "== True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild() for x", "= True overall_str_list = None if have_node_str == True: curr_leaf_status = str(node.getNumChildren()) overall_str_list", "in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), 
None) next_node = RTreeNode(None, [], True) next_entry", "= entry def isSuperNode(self): return self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode = is_supernode def", "(110, 200, 100), point2) node2 = RTreeNode(None, [], True) entry2 = RTreeEntry(mbr2, node2)", "= [x for x in self.getNodes() if x.getEntry().getMBR().isRaw() == True] start_rectangle_entries = [x.getEntry()", "for start_rectangle_entry in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return", "self.upper_left = upper_left self.lower_right = lower_right self.id_value = id_value def getUpperLeft(self): return self.upper_left", "(False, None, None, None) dimension = None result1 = self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2,", "heapq.heappop(heap) entry = item node = entry.getChild() mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr) ==", "= node.getParent().retrieveEntryForChild(node) \"\"\" entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries]", "actual rectangles for an r-tree; # takes O(n * log(n)) time at worst;", "499, 483), (1300, 1330, 1055))] \"\"\" # n = 10,000 works in 1", "entry_group2, axis, False) return next_result def xtreeSupernodeInsert(self, node, entries): if node.isSuperNode() == False:", "1000 # 1.1649 seconds (~2.72x slower for 10x growth; expected 33x slower) #", "multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() == True: \"\"\" if node.isUnderfull() == True:", "== min_overlap_value] next_next_candidates = [x[1] for x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1:", "in Q: curr_entry = curr_node.getEntry() # print \"mbr:\", 
curr_entry.getMBR().toString() # print \"tree:\", self.toString()", "if entry != self.getRootEntry() else None if leaf_node == None: raise Exception(\"expected a", "1133)), \\ ((262, 221, 872), (500, 279, 1521)), \\ ((332, 886, 493), (822,", "Exception() window_left_sizes = [m - 1 + k for k in range(1, M", "entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4, (110, 200, 100), point4)", "(1 / 3.0) / denominator)) # for n = 20000 # k =", "= deque() # while len(heap) != 0: while len(internal_node_stack_deque) != 0 or len(heap)", "close-ancestor finding, # which for a well-formed r-tree, takes O(n * log(n)) time;", "d_S_pairs = S_comp_dict.items() min_S_value = min([x[1] for x in d_S_pairs]) min_S_value_d_S_pair_candidates = [x", "def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry =", "root_entry pair = (priority,item) heapq.heappush(heap,pair) # print entry_pq # raise Exception() result_entry_list =", "self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) !=", "in r-tree; these times assume \"maximal disjointedness\" # and depth-first stack for internal", "[x for x in d_S_pairs if x[1] == min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value", "== False: return 0 else: upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b =", "string.join(overall_str_list, \" \") + \")\" return overall_str def toEntriesArePresentString(self): root = self.getRootEntry().getChild() return", "[(3, 10, 10), (1, 10, 10), (8, 10, 10), (6, 10, 10), (9,", "= self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches = self.getLowerRight() == mbr.getLowerRight() result = upper_left_matches ==", "next_y1, next_x2, next_y2)) image.write(\"tree.png\") 
def main(): point1 = (30, 100, 0) point2 =", "rectangles are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x for x in self.getNodes() if", "self.getRootEntry() root_node = root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr)", "elif split_status == RTree.SUPERNODE: pass # print \"no split\" return (RTree.NO_SPLIT, [node]) def", "size decreases to or below M # updated on 2016-11-06 to add single-start-rectangle-based", "\"tree, currently:\", tree.toString() # tree2.delete(entry) pass # print tree.toString() result = tree.getRectangleCloseDescendants(entry8) print", "= do_overlap and comp_a1 < comp_b2 and comp_a2 > comp_b1 else: do_overlap =", "int(round((1000 * math.log(1000, 2)) ** (1 / 3.0) / denominator)) # for n", "return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim def getParent(self): return self.parent def", "split_status, added_nodes = result curr_entry = node.getEntry() curr_mbr = curr_entry.getMBR() mbr = entry.getMBR()", "<= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: split_result = tree.rstarSplitNode(parent,", "(1.0 * 6.5) * 0.8 offset = (1536 * 0.2) / 2 next_x1,", "entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x) for x in mbr_list] enlargement_values = [x[0]", "= multiplier * x next_y = multiplier * y image.strokeColor(\"none\") image.fillColor(\"black\") center_x =", "True: curr_depth = \"-\" if node.getNumEntries() != 0 else str(depth) overall_str_list = [curr_depth]", "leaves in r-tree; these times assume \"maximal disjointedness\" # and depth-first stack for", "[node.getEntry().getMBR().toString()] # overall_str_list = [] if node.getNumChildren() == 0 else 
[node.getEntry().getMBR().toString(), str(node)] for", "(node1, node2, entry1, entry2) @staticmethod def rstarPreadjustTree(self, leaf_node): node = leaf_node parent =", "== False: # this is idempotent for added_node in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if", "10000 # 84.222 seconds (~197x slower for 100x growth; expected 664x slower) #", "== 1: # shorten tree entries = root.getEntries() chosen_entry = entries[0] chosen_child =", "__init__(self, parent, entries, is_leaf, entry = None, split_history_root_dimension = None, is_supernode = False):", "== False: overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list =", "0.2) / 2 next_x = multiplier * x next_y = multiplier * y", "multi_overlap_ratio = overlap_area_sum / (1.0 * union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif", "M, m): result = RTree.rstarGenDistributions(entries, M, m) candidate_distributions = None candidate_distributions = result[axis][0]", "right_value1 >= right_value2 if component_does_enclose == False: does_enclose = False break return does_enclose", "= result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for", "do_fail == True or len(entry_collection3) < node.getMinimumNumEntriesPerNode() or len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False,", "time1 print \"time difference:\", time_diff, \"seconds\" # raise Exception() for entry_to_close_ancestor_entry_list_pair in result.items():", "result = self.xtreeInsertHelper(entry, follow) split_status, added_nodes = result curr_entry = node.getEntry() curr_mbr =", "= 6,000 works in 56.672 sec. 
for pypy with m = 8 and", "return self.id_value class Point: def __init__(self, vec, id_value): self.vec = vec self.id_value =", "multiplier + offset next_y2 = y2 * multiplier + offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1,", "x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] for keep_node in keep_nodes: Q.append(keep_node) #", "result_mbr_list) return mbr @staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr,", "4 perimeter_x = next_x1 perimeter_y = next_y1 + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y))", "* multiplier + offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) image.write(\"tree.png\") def main():", "lower_right = self.getLowerRight() contained_item = self.getContainedItem() mbr = RawMBR(upper_left, lower_right, contained_item) return mbr", "findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry, curr_entry): \"\"\" if node.isLeafNode() ==", "self.getNumEntries() def setParent(self, node): self.parent = node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() ==", "stilted since we don't need a O(log(n)) time operation # to find the", "perimeter_y = next_y + offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children =", "(100, 100, 0)) curr_mbr2 = RawMBR((50, 100, 0), (50, 100, 0), point3) curr_mbr2b", "151), (889, 1755, 320)), \\ ((945, 260, 1091), (1932, 332, 1133)), \\ ((262,", "point6 = (80, 100, 0) point7 = (90, 100, 0) point8 = (110,", "entries matching_entries = conflict_x_tree.doContainmentQuery(mbr) for matching_entry in matching_entries: # raise Exception() conflict_x_tree.delete(matching_entry) #", "= max(upper_left_b[i], lower_right_b[i]) # print comp_a1, comp_a2, comp_b1, comp_b2 # do_overlap = 
True", "tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7) tree.delete(entry8) \"\"\" print tree.toString() tree2 = RTree() import random", "contained_item = self.getContainedItem() mbr = RawMBR(upper_left, lower_right, contained_item) return mbr def doesMatch(self, mbr):", "def getMBR(self): return self.mbr def setMBR(self, mbr): self.mbr = mbr def getChild(self): return", "return len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension = self.getDimension() does_enclose = True for i", "to cut down branches explored; # to counter saturation, domain has to grow", "\"\"\" # if RN is a leaf node # search all entries of", "return result class CompositeMBR(MBR): def __init__(self, upper_left, lower_right, mbr_list): MBR.__init__(self, upper_left, lower_right) self.mbr_list", "= [(x.getMBR(), x) for x in entries] tagged_overlapped_mbr_list = [x for x in", "= resulting_entries_from_split l = e.getChild() ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1) <=", "# we are a root node if self.getRootEntry().getChild().getNumChildren() == 0: root_node = RTreeNode(None,", "0 or len(heap) != 0: # entry = entry_pq.pop() item = None if", "case # print \"split\" return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry) entry.getChild().setParent(node)", "node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3, (110, 200, 100), point3) node3 = RTreeNode(None, [],", "# print \"split\" return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\"", "2: x1, y1 = upper_left x2, y2 = lower_right margin = 2 *", "curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component = 0", "__init__(self): root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None) root_entry =", "* y 
image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x + offset center_y = next_y +", "= self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i] right_value1 = self.getLowerRight()[i] right_value2 = mbr.getLowerRight()[i] component_does_enclose =", "1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.chooseLeafHelper(entry,", "first_priority_component = 0 if curr_mbr_is_contained == True else 1 second_priority_component = (-1 if", "RawMBR(point5, (110, 200, 100), point5) node5 = RTreeNode(None, [], True) entry5 = RTreeEntry(mbr5,", "is_root_node = node == self.getRootEntry().getChild() if is_root_node == True: have_node_str = True overall_str_list", "self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node, depth): if node == None: return \"\" entries", "== True: if entry.getMBR().doesEnclose(mbr) == True: return True else: entries = entry.getChild().getEntries() for", "bug with parent pointers for xtreeInsert(); # have supernode demotion when size decreases", "[x for x in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) == True] for tagged_overlapped_mbr in", "self.rstarChooseLeaf(entry) adjust_result = None if leaf_node.isFull() == False: leaf_node.addEntry(entry) entry.getChild().setParent(leaf_node) adjust_result = RTree.rstarAdjustTree(self,", "== 0 def peek(self): heap = self.heap pair = heap[0] result = pair", "an internal node # find all entries of RN that cover E.mbr #", "\\ ((803, 233, 521), (1314, 717, 1487)), \\ ((660, 268, 962), (1293, 619,", "+ random.random() * 100) # y2 = int(y1 + random.random() * 100) #", "for x in entries] entry.draw(tree, entries, image, depth + 1) class MBR: def", "- max(comp_a1, comp_b1)) sides.append(side) intersection_volume = reduce(lambda x, y: x * y, sides)", "curr_node.removeEntry(curr_entry) next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2 = 
RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if", "getUnionArea(self): pass \"\"\" # takes O(log(n)) time on average for start rectangle #", "candidate_entries = [mbr_to_entry_dict[x[1]] for x in candidate_tagged_enlargement_values] return candidate_entries def resolveEnlargementTie(self, entries, entry):", "for x in tagged_mbr_list if MBR.doOverlap(curr_mbr, x[0]) == True] for tagged_overlapped_mbr in tagged_overlapped_mbr_list:", "fix for adjustTree(); # fixed bug with parent pointers for xtreeInsert(); # have", "margin = 2 * (x2 - x1) + 2 * (y2 - y1)", "d_S_pairs if x[1] == min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0] return chosen_d_value", "= node.getEntries() axis = node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2", "self.getRootEntry().getChild(): return node else: return node.getParent() else: entries = node.getEntries() candidate_entries = None", "where the leaf mbr # is not contained by reference rectangle; # check", "[], True) entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7, (110, 200,", "True] for keep_node in keep_nodes: Q.append(keep_node) # only makes sense to speak of", "if node.isNonTraditionalLeafNode() == False: # this is idempotent for added_node in added_nodes: node.addEntry(added_node.getEntry())", "where n is number of actual rectangles or leaves; # assumes that rectangles", "window_left_sizes = [x for x in window_left_sizes if x <= M and x", "RawMBR((50, 50, 0), (100, 100, 0), HyperRectangle((50, 50, 0), (100, 100, 0), 1))", "= PythonMagick.Color(0, 0, 65535, 32767) elif color_choice == 2: color = PythonMagick.Color(0, 65535,", "# keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] keep_nodes", "y2 * multiplier + offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, 
next_y1, next_x2, next_y2)) image.write(\"tree.png\") def", "= self.getUpperLeft() lower_right = self.getLowerRight() if self.getDimension() == 0: raise Exception() if self.getDimension()", "(512, 512), \"white\") \"\"\" im = Image.new(\"RGB\", (768, 768), \"white\") draw = ImageDraw.Draw(im)", "ll, e, ee = split_result adjust_result = RTree.rstarAdjustTree(self, l, [e, ee], True) ended_with_split2,", "0 and node == self.getRootEntry().getChild(): # if node.getNumChildren() == 0: # if node.isNonTraditionalLeafNode()", "# search all entries of RN to find E.mbr # else: # RN", "time_diff = time2 - time1 print \"time difference:\", time_diff, \"seconds\" # raise Exception()", "in entries: # set priority correctly and add to priority queue curr_node =", "M = 16 # these numbers are for upper-left's in (100, 10100) and", "+ string.join(overall_str_list, \" \") + \")\" return overall_str def toEntriesArePresentString(self): root = self.getRootEntry().getChild()", "if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry) was_successful, entry_collection1, entry_collection2, dimension =", "MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self, node, entry): # we never split a super-node", "child = entry.getChild() child_str = self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\"", "returns entries def doEnclosureQuery(self, mbr): partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result", "lower_right = mbr.getLowerRight() x1, y1 = upper_left x2, y2 = lower_right multiplier =", "node.setToSuperNode(False) if node.getParent() == None: # we are a root node if self.getRootEntry().getChild().getNumChildren()", "node.isNonTraditionalLeafNode() == True: if node.isLeafNode() == True and node == self.getRootEntry().getChild(): node.addEntry(entry) curr_node", "in entries: child = entry.getChild() child_str = 
self.toNumChildrenStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str", "x[1].getMarginValue() for x in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs =", "0: color = PythonMagick.Color(65535, 0, 0, 32767) elif color_choice == 1: color =", "= entry.getChild() node.addEntry(entry) # needed this curr_node.setParent(node) \"\"\" entries = node.getEntries() mbr_list =", "node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent, entry_collection1, None, entry1) entry1.setChild(node1) entry2", "works in 1 min. 54 sec. for pypy with m = 2 and", "mbr2 = RawMBR(point2, (110, 200, 100), point2) node2 = RTreeNode(None, [], True) entry2", "True) entry = RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry) \"\"\" # for entry in", "r-tree, this takes O(n * log(n)) time, # where n is number of", "collections import deque # min-pq class PriorityQueue: def __init__(self): self.heap = [] def", "for y in x[0]], [y.getMBR() for y in x[1]]) for x in upper_comp_distributions]", "prev_leaf_status) for curr_entry in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 =", "right_value2 = mbr.getLowerRight()[i] component_does_enclose = left_value1 <= left_value2 and right_value1 >= right_value2 if", "= upper_left_points + lower_right_points min_components = [] max_components = [] for i in", "prev_leaf_status) node2 = RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry in entry_group1: curr_entry.getChild().setParent(node1) for curr_entry", "upper_left[i] comp_1b = lower_right[i] term1 = comp_1b - comp_1a for j in xrange(i", "mbr def doesMatch(self, mbr): upper_left_matches = self.getUpperLeft() == mbr.getUpperLeft() lower_right_matches = self.getLowerRight() ==", ": 4]: # for entry in entries[0 : 15]: for entry in 
entries:", "updated on 2016-08-25 to fix overlap logic for determining when to attempt an", "im = Image.new(\"RGB\", (512, 512), \"white\") \"\"\" im = Image.new(\"RGB\", (768, 768), \"white\")", "== True: \"\"\" if node.isUnderfull() == True: # print \"underfull\" parent = node.getParent()", "for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None if have_resulting_second_entry_from_split", "True: # if node.getNumChildren() == 0 and node == self.getRootEntry().getChild(): # if node.getNumChildren()", "component_mbr_list] lower_right_points = [x.getLowerRight() for x in component_mbr_list] points = upper_left_points + lower_right_points", "child = entry.getChild() child_str = self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\"", "= entry def removeEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m", "entry = tree.getRootEntry() else: entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in", "and containment queries # dimension is implicit (determined using points sampled) and assumed", "def getContainedItem(self): return self.contained_item def getMBRList(self): return [self] def clone(self): upper_left = self.getUpperLeft()", "0.2) / 2 x1 = 0 y1 = 0 x2 = 47 y2", "if result == False: return False return True def toNumChildrenString(self): root = self.getRootEntry().getChild()", "mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change = enlarged_mbr_area - base_mbr_area return area_change @staticmethod def", "for n = 5500 # k = int(round((10000 * math.log(10000, 2)) ** (1", "CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, 
have_resulting_second_entry_from_split)", "mbr = entry.getMBR() tagged_mbr_list = [] for curr_entry in entries: base_mbr = curr_entry.getMBR()", "+ x[1].getMarginValue() for x in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i]", "entries] entry.draw(tree, entries, image, depth + 1) class MBR: def __init__(self, upper_left, lower_right):", "in mbr_list] enlargement_values = [x[0] for x in tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values", "directed graph implied by the r-tree # is not acyclic and we have", "return overall_str def toDepthString(self): root = self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node,", "the node if node.isUnderfull() == False: # print \"not underfull\" parent = node.getParent()", "True: first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True: partner_node", "start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def draw(self):", "if reference_mbr.doesEnclose(mbr) == False: continue # kick out close descendant candidates on occasion,", "x in added_nodes]) # print \"supernode #1\" return (RTree.SUPERNODE, [node]) elif split_status ==", "toDepthStringHelper(self, node, depth): if node == None: return \"\" entries = node.getEntries() children", "an r-tree; # takes O(n * log(n)) time at worst; # assumes that", "# a \"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i]) # a \"right\" comp_a2 = max(upper_left_a[i],", "in curr_entries] mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = 
CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr)", "- window_left_sizes[i]) for i in range(len(window_left_sizes))] window_size_pairs = [x for x in window_size_pairs", "result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4, dimension, do_fail = result2 # raise Exception()", "slower) # n = 2000 # n = 1000 # n = 20000", "((660, 268, 962), (1293, 619, 1521)), \\ ((798, 928, 1028), (1762, 1795, 1309)),", "= child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") + \")\" return", "return is_leaf_node def addEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def removeEntry(self,", "= next_x + offset perimeter_y = next_y + offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y,", "\"\"\" # print tree.toString() # for entry in entries[0 : 4]: # print", "# close-descendant finding that takes O(log(n)) time on average # for start rectangle", "self.parent = node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent() == None and self.getNumChildren() ==", "good enough to cut down branches explored; # to counter saturation, domain has", "in tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr curr_node = curr_entry.getChild() result = self.findLeafHelper(entry, curr_node)", "\"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString() # raise", "== self.getRootEntry().getChild(): # if node.getNumChildren() == 0: # if node.isNonTraditionalLeafNode() == True: if", "that rectangles never have negative area for i in xrange(mbr_a.getDimension()): # a \"left\"", "chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry = entries[i]", "= 8 self.M = 16 self.child_to_entry_dict = {} for curr_entry 
in entries: curr_child", "return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node", "== False: continue # kick out close descendant candidates on occasion, # if", "node8) node8.setEntry(entry8) # problem here tree.insert(entry8) print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50,", "0: # if node.isNonTraditionalLeafNode() == True: if node.isLeafNode() == True and node ==", "finds one match if such a node exists # def delete(self, E, RN):", "Exception(\"expected a node to be found for a delete\") # if parent has", "slower) # n = 14500 # 170.053 seconds (~398x slower for 145x growth;", "in added_nodes: node.addEntry(added_node.getEntry()) added_node.setParent(node) if split_status == RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren() >", "= lower_right[i] side = comp2 - comp1 sides.append(side) area = reduce(lambda x, y:", "curr_node in Q: curr_entry = curr_node.getEntry() # print \"mbr:\", curr_entry.getMBR().toString() # print \"tree:\",", "root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) def getRootEntry(self): return self.root_entry def setRootEntry(self, root_entry):", "reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component = 0 if curr_mbr_is_contained == True else 1", "low_sorted_entries = entries[ : ] low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[", "image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1 center_y = next_y1 radius = 4 perimeter_x =", "entry_group1, prev_leaf_status) node2 = RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry in entry_group1: curr_entry.getChild().setParent(node1) for", "else 1 second_priority_component = (-1 if root_mbr_is_contained == True else 1) * root_mbr_area", "elif color_choice == 1: color = 
PythonMagick.Color(0, 0, 65535, 32767) elif color_choice ==", "+ 1, self.getDimension()): comp_2a = upper_left[j] comp_2b = lower_right[j] term2 = comp_2b -", "= [] max_components = [] for i in xrange(base_mbr.getDimension()): components = [x[i] for", "in x[1]]) for x in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in", "be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root = self.getRootEntry().getChild() \"\"\" if root.getNumChildren() == 1:", "query def doOverlapQuery(self, mbr, without_borders = False): partial_result = [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result,", "node = leaf_node parent = node.getParent() if parent != None: curr_entries = node.getEntries()", "e, ee = split_result return tree.adjustTree(tree, l, [e, ee], True, False) else: return", "heap = self.heap pair = heap[0] result = pair return result def toList(self):", "[], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry) \"\"\" # for entry", "False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list = [] for entry in", "tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() ==", "running time estimates; the reason is that # otherwise the directed graph implied", "node.isLeafNode() == True: if node == self.getRootEntry().getChild(): return node else: return node.getParent() else:", "lower_right) self.mbr_list = mbr_list def getMBRList(self): return self.mbr_list def isComposite(self): return True @staticmethod", "= entry_pq.pop() item = None if len(heap) != 0: (priority,item) = heapq.heappop(heap) elif", "[] for i in xrange(dimension): comp_a1 = upper_left_a[i] comp_a2 = lower_right_a[i] comp_b1 =", "within reference mbr continue if 
conflict_x_tree.doEnclosureQueryWithEarlyStopping(mbr) == True: # ignore node if enclosing", "self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return (node1, node2, entry1, entry2) @staticmethod def rstarPreadjustTree(self,", "def isEqualTo(self, mbr): upper_left1 = self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2", "y in x[0]], [y.getMBR() for y in x[1]]), x) for x in next_candidate_distributions]", "[x.getMBR() for x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False:", "None # if node.isLeafNode() == True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) !=", "e, ee = split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True) else: return RTree.rstarAdjustTreeHelper(tree,", "distinction # updated on 2016-08-25 to fix overlap logic for determining when to", "20000 # 230.0411 seconds (~538x slower for 200x growth; expected 1528x slower) #", "def rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): if node.getParent() == None: entry = tree.getRootEntry() curr_entries", "== True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries,", "\"-\" if (node.getParent() == None or (node.getParent() != None and node in node.getParent().getChildren()))", "entries: child = entry.getChild() child_str = self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str =", "return self.xtreeInsert(entry) def chooseSubtree(self, entry, node): entries = node.getEntries() candidate_entries = None #", "= 
root_mbr.getArea() first_priority_component = 0 if root_mbr_is_contained == True else 1 second_priority_component =", "lower_right, point) return result_mbr def getContainedItem(self): return self.contained_item def getMBRList(self): return [self] def", "self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \") +", "getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries() def setParent(self,", "= CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node) root_node.setEntry(root_entry) self.setRootEntry(root_entry) return else: entry", "in xrange(10): upper_left = (20, 20) lower_right = (40, 40) mbr = RawMBR(upper_left,", "well-formed r-tree, takes O(n * log(n)) time; # these times involve n, which", "start rectangle # taken from set of actual rectangles for an r-tree; #", "priority queue for leaf nodes # updated on 2016-11-16 to fix margin calculation", "for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: # raise Exception()", "x in mbr_pair_tagged_candidate_distributions] overlap_values = [x[0] for x in overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values)", "mbr to a point\") return mbr.getUpperLeft() def getVec(self): return self.vec def getComponent(self, d):", "= [x for x in d_S_pairs if x[1] == min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0]", "in overlap_value_tagged_candidate_distributions] min_overlap_value = min(overlap_values) matching_overlap_value_tagged_candidate_distributions = [x for x in overlap_value_tagged_candidate_distributions if", "False: does_enclose = False break return does_enclose def isEqualTo(self, mbr): upper_left1 = self.getUpperLeft()", "if was_successful == True: mbr_collection1 = [x.getMBR() for x in 
entry_collection1] mbr_collection2 =", "for entry in entries: child = entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str = child_str", "leaf L that contains E is found # remove E from L #", "node.isUnderfull() == True: # print \"underfull\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't use", "print tree.toString() # tree.delete(entry1) print tree.toString() # tree.delete(entry8) # tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2)", "= node.getEntries() priority_tagged_internal_entries = [] for curr_entry in entries: # set priority correctly", "node == self.getRootEntry().getChild(): # if node.getNumChildren() == 0: # if node.isNonTraditionalLeafNode() == True:", "None: return \"\" entries = node.getEntries() children = node.getChildren() have_node_str = True is_root_node", "node.isUnderfull() == False: # print \"not underfull\" parent = node.getParent() curr_entries = node.getEntries()", "= curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list = [x.getMBR() for x in entries] mbr", "= self.getUpperLeft() lower_right = self.getLowerRight() sides = [] for i in xrange(self.getDimension()): comp1", "ul_lr_pairs[i][1] # x = int(random.randint(1, 100)) # y = 10 # z =", "upper_left x2, y2 = lower_right margin = 2 * (x2 - x1) +", "# print entry_pq # raise Exception() result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry)", "100, 0), (100, 100, 0), (100, 100, 0)) curr_mbr2 = RawMBR((50, 100, 0),", "True @staticmethod def makeMBRFromPoint(point): upper_left = point lower_right = point result_mbr = RawMBR(upper_left,", "def insert(self, entry): return self.xtreeInsert(entry) def chooseSubtree(self, entry, node): entries = node.getEntries() candidate_entries", "increase domains and # high inter-group overlap means maximal disjointedness # is not", "!= 1: candidate_entries = 
self.resolveEnlargementTie(candidate_entries, entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild()", "print \"supernode encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception() if node.getNumChildren()", "= 0 for i in xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b = lower_right[i] term1", "self.mbr_list = mbr_list def getMBRList(self): return self.mbr_list def isComposite(self): return True @staticmethod def", "len(entry_collection4) < node.getMinimumNumEntriesPerNode(): return (False, None, None, dimension) else: return (True, entry_collection3, entry_collection4,", "center_y, perimeter_x, perimeter_y)) children = [x.getChild() for x in entries] entry.draw(tree, entries, image,", "doEnclosureQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry)", "node8 = RTreeNode(None, [], True) entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem here", "= [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()]", "= upper_left self.lower_right = lower_right self.id_value = id_value def getUpperLeft(self): return self.upper_left def", "a non-traditional leaf leaf_node = child_node.getParent() if entry != self.getRootEntry() else None if", "# entry_pq.push(root_entry, priority) item = root_entry pair = (priority,item) heapq.heappush(heap,pair) # print entry_pq", "self.xtreeInsert(entry) def chooseSubtree(self, entry, node): entries = node.getEntries() candidate_entries = None # if", "follow the corresponding subtrees unti lthe leaf L that contains E is found", "= [node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list = [] if node.getNumChildren()", "[x.getMBR() for x in 
curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None if", "E.mbr # follow the corresponding subtrees unti lthe leaf L that contains E", "for curr_entry in entries: # set priority correctly and add to priority queue", "xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod def rstarChooseSplitAxis(entries, M, m):", "x.getEntry().getMBR().isRaw() == True] start_rectangle_entries = [x.getEntry() for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {}", "range(len(entries)): curr_entry = entries[i] curr_mbr = curr_entry.getMBR() mbr_to_entry_dict[curr_mbr] = curr_entry mbr_list = [x.getMBR()", "len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild()", "in node.getChildren(): result = self.hasConsistentNonTraditionalLeafDepthValuesHelper(curr_node, depth, curr_depth + 1) if result == False:", "for close-descendant # and close-ancestor finding; the assumption is necessary # to make", "== True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() == 0: pass return (RTree.NO_SPLIT, [node])", "self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self, node, entry): # we never", "next_result def xtreeOverlapMinimalSplit(self, node, entry): if node.getSplitHistoryRootDimension() == None: return (None, None, None,", "= 2 and M = 4 # n = 1,000 works in 3.428", "return distance class RTreeNode: def __init__(self, parent, entries, is_leaf, entry = None, split_history_root_dimension", "entry_group1, entry_group2 = result next_result = (entry_group1, entry_group2, axis) return next_result def xtreeOverlapMinimalSplit(self,", "self.getNumEntries() < self.getMinimumNumEntriesPerNode() def 
retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries()) class", "if self.getRootEntry().getChild() == node: next_root_entry = RTreeEntry(next_mbr, None) next_root = RTreeNode(None, [entry1, entry2],", "return intersection_volume def getMarginValue(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() if self.getDimension() ==", "point1 = (30, 100, 0) point2 = (40, 100, 0) point3 = (50,", "self.setRootEntry(root_entry) return else: entry = self.getRootEntry() curr_entries = entry.getChild().getEntries() children = [x.getChild() for", "x[0] == min_overlap_value] next_next_candidates = [x[1] for x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) >", "partner_node.getEntries() partner_children = [x.getChild() for x in partner_entries] partner_mbr_list = [x.getMBR() for x", "- 2 * m + 2 + 1)] window_left_sizes = [x for x", "as priority (prefer_contained, prefer_large_area_if_contained_else_small) if self.getRootEntry().getChild().getNumChildren() == 0: return [] reference_mbr = reference_entry.getMBR()", "True) entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6) mbr7 = RawMBR(point7, (110, 200, 100),", "mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() do_overlap = True # assume that rectangles never have", "child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self):", "x in tagged_area_values if x[0] == min_area] candidate_entries = [x[1] for x in", "(RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() == True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() == 0:", "resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True and 
is_first_call_after_first_pass != True: partner_node", "-1 * root_mbr_area # entry_pq = PriorityQueue() heap = [] # entry_pq.push(root_entry, priority)", "node.getEntries() entry = None \"\"\" if node.getParent() == None: entry = tree.getRootEntry() else:", "without_borders): if entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True: partial_result.append(entry) else:", "isComposite(self): return False def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getArea(self):", "= RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None) root_entry = RTreeEntry(root_mbr, root_node)", "upper_left1 = self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal", "image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children = [x.getChild() for x in entries] entry.draw(tree, entries,", "== True: curr_depth = \"-\" if node.getNumEntries() != 0 else str(depth) overall_str_list =", "True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result == True: return True return False", "dimension = split_result if was_successful == True: mbr_collection1 = [x.getMBR() for x in", "next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum /", "RTree.MAX_OVERLAP_RATIO: # raise Exception() result2 = self.xtreeOverlapMinimalSplit(node, entry) entry_collection3, entry_collection4, dimension, do_fail =", "make strong running time estimates; the reason is that # otherwise the directed", "if node.isSuperNode() == False: node.setToSuperNode(True) # questionable if this is really necessary for", "False def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getArea(self): upper_left =", "= [x.getMBR() for 
x in entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2]", "E_overall = node.getEntries() axis = node.getSplitHistoryRootDimension() result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1,", "entries] mbr = entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x) for x in mbr_list]", "= RTreeEntry(curr_overall_mbr1, node1) entry2 = RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent != None:", "entries[0 : 15]: for entry in entries: tree2.insert(entry) \"\"\" if entry.getChild().getParent() == None:", "far, we should add children to priority queue entries = node.getEntries() priority_tagged_internal_entries =", "y1) return margin surface_area = 0 for i in xrange(self.getDimension()): comp_1a = upper_left[i]", "entry_to_close_ancestor_entry_list_pair print \"start rectangle:\", entry.getMBR().toString() for close_ancestor_entry in close_ancestor_entry_list: print \"close ancestor:\", close_ancestor_entry.getMBR().toString()", "== True: parent.removeEntry(entry) if (parent.getNumChildren() + 2) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(entry) parent.addEntry(partner_entry) entry.getChild().setParent(parent) partner_entry.getChild().setParent(parent)", "M = 4 # n = 1,000 works in 2.996 sec. 
for pypy", "def main(): point1 = (30, 100, 0) point2 = (40, 100, 0) point3", "return True @staticmethod def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for x in component_mbr_list] lower_right_points", "is necessary # to make strong running time estimates; the reason is that", "z = 10 # lower_right = (x, y, z) # lower_right = lower_rights[i]", "root; # also, we implement delete(); note that our tree # has entry-aware", "def getSize(self): return len(self.heap) import math def getDistance(point1, point2): x1, y1 = point1", "denominator)) # for n = 1000 # k = int(round((5500 * math.log(5500, 2))", "mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]), x)", "node.getParent() curr_entries = node.getEntries() entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in", "def doEnclosureQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True:", "for x in entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2] # this", "(self.child_to_entry_dict)[curr_child] = entry def removeEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return", "196, 750), (1085, 718, 1259)), \\ ((808, 926, 151), (889, 1755, 320)), \\", "base_mbr.getArea() enlarged_mbr = MBR.getEnlargedMBR(base_mbr, mbr) enlarged_mbr_area = enlarged_mbr.getArea() area_change = enlarged_mbr_area - base_mbr_area", "0: root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None, None) root_entry =", "594)), \\ ((294, 238, 1036), (785, 378, 1963)), \\ ((803, 1054, 307), (1776,", "/ (1.0 * union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() == True:", "for pypy with m = 8 and M = 16 # these numbers", "entry_collection1, entry_collection2, dimension = result1 mbr_collection1 = [x.getMBR() 
for x in entry_collection1] mbr_collection2", "parent.removeEntry(original_entry) if node != self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root = RTreeNode(None,", "setRootEntry(self, root_entry): self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node = root", "== lower_right2 return is_equal class RawMBR(MBR): def __init__(self, upper_left, lower_right, contained_item): MBR.__init__(self, upper_left,", "# print \"supernode encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception() if", "x in tagged_mbr_list] area_values = [x[0] for x in tagged_area_values] min_area = min(area_values)", "for curr_mbr in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(), None) next_node = RTreeNode(None, [],", "!= self.getRootEntry().getChild(): parent.addEntry(entry1) parent.addEntry(entry2) node1.setParent(parent) node2.setParent(parent) else: next_root = RTreeNode(None, [entry1, entry2], False)", "(761, 1089, 594)), \\ ((294, 238, 1036), (785, 378, 1963)), \\ ((803, 1054,", "mbr, entry, partial_result): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else:", "close descendant candidates on occasion, # if containment query for conflict x-tree returns", "0), point3) curr_mbr2b = RawMBR((50, 50, 0), (100, 100, 0), HyperRectangle((50, 50, 0),", "((1081, 1056, 1020), (1708, 1075, 1542)), \\ ((358, 815, 372), (761, 1089, 594)),", "# RN is an internal node # find all entries of RN that", "xrange(entries[0].getMBR().getDimension()): low_sorted_entries = entries[ : ] low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i]) low_distributions =", "== None: return \"\" entries = node.getEntries() children = node.getChildren() have_node_str = True", "100), 
point1) node1 = RTreeNode(None, [], True) entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1)", "0, 32767) if upper_left == lower_right: image.strokeColor(\"none\") image.fillColor(color) center_x = next_x1 center_y =", "curr_depth): if node == None: return elif node.isLeafNode() == True: if depth !=", "node_list = [] self.getNodesHelper(self.getRootEntry().getChild(), node_list) return node_list def getNodesHelper(self, node, partial_result): partial_result.append(node) for", "and # best-first priority queue for leaf nodes # updated on 2016-11-16 to", "tree = RTree() print tree.toString() curr_root = tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110, 200,", "= int(round((10000 * math.log(10000, 2)) ** (1 / 3.0) / denominator)) # for", "= comp_2b - comp_2a term = 2 * term1 * term2 surface_area +=", "curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry)) tagged_area_values = [(x[0].getArea(), x[1]) for x in", "item is in tree # returns a node, which can be None if", "find all entries of RN that cover E.mbr # follow the corresponding subtrees", "side = comp2 - comp1 sides.append(side) area = reduce(lambda x, y: x *", "return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry): return self.xtreeInsert(entry) def chooseSubtree(self, entry, node): entries", "768), \"white\") draw = ImageDraw.Draw(im) root = self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\")", "x in low_mbr_pairs] low_margin_value_sum = sum(low_margin_values) S_comp_value += low_margin_value_sum upper_constituent_mbr_list_pairs = [([y.getMBR() for", "return False def delete(self, entry): # print \"hello\" did_find_leaf = self.findLeaf(entry) child_node =", "[], True) entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3, (110, 200,", "is an internal node # find all entries of RN that cover E.mbr", "(822, 1305, 1149)), \\ ((800, 709, 871), (1390, 1402, 
1548)), \\ ((433, 499,", "x1, y1 = point1 x2, y2 = point2 change_x = x2 - x1", "pointers for xtreeInsert(); # have supernode demotion when size decreases to or below", "RTreeEntry(curr_overall_mbr1, node1) entry2 = RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent != None: original_entry", "!= mbr.getLowerRight(): raise Exception(\"attempted to turn a non-point mbr to a point\") return", "def getDimension(self): return len(self.getUpperLeft()) def doesEnclose(self, mbr): dimension = self.getDimension() does_enclose = True", "len(tree2.getNodes()) import time time1 = time.time() result = tree2.getAllRectangleCloseAncestors() time2 = time.time() time_diff", "step is crucial # if node.isNonTraditionalLeafNode() == False: # this is idempotent for", "start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry)", "1 min. 54 sec. 
for pypy with m = 2 and M =", "(110, 200, 100), point5) node5 = RTreeNode(None, [], True) entry5 = RTreeEntry(mbr5, node5)", "\"\"\" if node.isSuperNode() == True: # print \"supernode encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node))", "\"\"\" for i in xrange(10): upper_left = (20, 20) lower_right = (40, 40)", "__init__(self, upper_left, lower_right, id_value): self.upper_left = upper_left self.lower_right = lower_right self.id_value = id_value", "# for n = 10000 # k = int(round((20000 * math.log(20000, 2)) **", "len(self.heap) == 0 def peek(self): heap = self.heap pair = heap[0] result =", "/ 2 x1 = 0 y1 = 0 x2 = 47 y2 =", "is_leaf_node = self.getNumChildren() == 0 return is_leaf_node def addEntry(self, entry): curr_child = entry.getChild()", "x in d_S_pairs if x[1] == min_S_value] chosen_d_S_pair = min_S_value_d_S_pair_candidates[0] chosen_d_value = chosen_d_S_pair[0]", "root_mbr_is_contained == True else 1 second_priority_component = (-1 if root_mbr_is_contained == True else", "self.mbr def setMBR(self, mbr): self.mbr = mbr def getChild(self): return self.child def setChild(self,", "= entry.getChild() child_str = self.toDepthStringHelper(child, depth + 1) curr_str = child_str overall_str_list.append(curr_str) overall_str", "center_x = next_x + offset center_y = next_y + offset radius = 2", "= int(100 + random.randint(0, k) * 100) # y1 = int(100 + random.randint(0,", "overall_str def toLeafStatusString(self): root = self.getRootEntry().getChild() return self.toLeafStatusStringHelper(root) def toLeafStatusStringHelper(self, node): if node", "next_result def xtreeSupernodeInsert(self, node, entries): if node.isSuperNode() == False: node.setToSuperNode(True) # questionable if", "node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2, (110, 200, 100), point2) node2 = RTreeNode(None,", "same entries # unless we explicitly create new entries, # which we do", "= next_y + offset radius = 2 perimeter_x = next_x 
+ offset perimeter_y", "x in points] min_comp_value = min(components) max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point =", "exists # def delete(self, E, RN): def findLeaf(self, entry): return self.findLeafHelper(entry, self.getRootEntry()) def", "= [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doEnclosureQueryHelper(self, mbr, entry, partial_result): if", "ignore if contained rectangle is contained by a rectangle in conflict x-tree, #", "\\ ((800, 709, 871), (1390, 1402, 1548)), \\ ((433, 499, 483), (1300, 1330,", "100), point8) node8 = RTreeNode(None, [], True) entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8) #", "x in low_constituent_mbr_list_pairs] low_margin_values = [x[0].getMarginValue() + x[1].getMarginValue() for x in low_mbr_pairs] low_margin_value_sum", "we explicitly create new entries, # which we do do occasionally # note", "candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\"", "for y in x[1]]) for x in upper_comp_distributions] upper_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for", "next_curr_node.setParent(node2) entry1 = RTreeEntry(curr_overall_mbr1, node1) entry2 = RTreeEntry(curr_overall_mbr2, node2) node1.setEntry(entry1) node2.setEntry(entry2) if parent", "self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders) return partial_result def doOverlapQueryHelper(self, mbr, entry, partial_result, without_borders): if", "assumed to be consistent # we never split a super-node # updated on", "def clone(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() contained_item = self.getContainedItem() mbr =", "a O(log(n)) time operation # to find the entry containing node; just look", "# 
internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component) priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item", "if its entry matches the ignore entry continue if node.isLeafNode() == True: #", "raise Exception(\"attempted to turn a non-point mbr to a point\") return mbr.getUpperLeft() def", "reference_entry) return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap, point, TopicKNearest, k): def getRectangleCloseDescendantsHelper(self, heap,", "print tree.toString() print tree.doEnclosureQuery(curr_mbr2) curr_mbr3 = RawMBR((50, 100, 0), (110, 200, 100), None)", "comp_b2 and comp_a2 >= comp_b1 if do_overlap == False: break return do_overlap @staticmethod", "node, entry): # we never split a super-node if node.isSuperNode() == True: #", "def doEnclosureQuery(self, mbr): partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doEnclosureQueryHelper(self,", "((798, 928, 1028), (1762, 1795, 1309)), \\ ((225, 359, 290), (579, 950, 700)),", "[] max_components = [] for i in xrange(component_mbr_list[0].getDimension()): components = [x[i] for x", "node = RTreeNode(None, [], True) entry = RTreeEntry(mbr, node) node.setEntry(entry) # entries.append(entry) \"\"\"", "entry) \"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return chosen_entry def xtreeInsert(self, entry):", "return node_list def getNodesHelper(self, node, partial_result): partial_result.append(node) for curr_node in node.getChildren(): self.getNodesHelper(curr_node, partial_result)", "raise Exception() for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print \"start rectangle:\",", "S_comp_value = 0 low_constituent_mbr_list_pairs = [([y.getMBR() for y in x[0]], [y.getMBR() for 
y", "result_entry_list = [] self.getRectangleCloseDescendantsHelper(heap, reference_mbr, result_entry_list, reference_entry) return result_entry_list # def TopicKNearestNeighborBestFirstSearchHelper(self, heap,", "= CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False: if have_resulting_second_entry_from_split == True: if (parent.getNumChildren()", "close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def draw(self): # im = Image.new(\"RGB\", (512, 512), \"white\")", "Q) return # not tested # returns entries # does intersection query def", "1x slower) # n = 1000 # 1.1649 seconds (~2.72x slower for 10x", "entry, image, depth) @staticmethod def drawHelper(tree, entry, image, depth): node = entry.getChild() entries", "or len(heap) != 0: # entry = entry_pq.pop() item = None if len(heap)", "= internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\", conflict_x_tree.toString() # for a well-formed r-tree,", "self.mbr_list def isComposite(self): return True @staticmethod def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for x", "insert parent = node.getParent() entry1 = RTreeEntry(CompositeMBR.makeMBR(mbr_collection1), None) node1 = RTreeNode(parent, entry_collection1, None,", "chosen_distribution_pair = next_next_candidates[0] return chosen_distribution_pair def chooseLeaf(self, entry): return self.chooseLeafHelper(entry, self.getRootEntry().getChild()) def chooseLeafHelper(self,", "dimension) else: return (True, entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self, node, entry): m =", "= [] for i in xrange(base_mbr.getDimension()): components = [x[i] for x in points]", "curr_mbr_area = curr_mbr.getArea() first_priority_component = 0 if curr_mbr_is_contained == True else 1 second_priority_component", 
"curr_entry.getChild().setParent(node1) for curr_entry in entry_group2: curr_entry.getChild().setParent(node2) mbr_group1 = [x.getMBR() for x in entry_group1]", "4 == 0: upper_left = (0, 0) lower_right = (10, 10) elif i", "= result curr_entry = node.getEntry() curr_mbr = curr_entry.getMBR() mbr = entry.getMBR() next_mbr =", "node.getParent().retrieveEntryForChild(node) \"\"\" entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries] mbr_list", "in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if node.isLeafNode() == False: if have_resulting_second_entry_from_split ==", "match if such a node exists # def delete(self, E, RN): def findLeaf(self,", "high-level to low-level # Q = list(set(Q)) Q.reverse() for curr_node in Q: curr_entry", "True @staticmethod def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for x in component_mbr_list] lower_right_points =", "denominator)) # for n = 20000 # k = int(round((14500 * math.log(14500, 2))", "mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR() for x in", "min(comp_a2, comp_b2) - max(comp_a1, comp_b1)) sides.append(side) intersection_volume = reduce(lambda x, y: x *", "image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) image.write(\"tree.png\") def main(): point1 = (30, 100, 0) point2", "def isComposite(self): return False def getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def", "< node.getMinimumNumEntriesPerNode(): return (False, None, None, dimension) else: return (True, entry_collection3, entry_collection4, dimension)", "4 # n = 1,000 works in 3.428 sec. 
for pypy with m", "a little stilted since we don't need a O(log(n)) time operation # to", "for x in candidate_tagged_area_values] return candidate_entries @staticmethod def rstarGenDistributions(entries, M, m): result_list =", "RTree.rstarGenDistributions(entries, M, m) candidate_distributions = None candidate_distributions = result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions =", "+ 1): raise Exception() window_left_sizes = [m - 1 + k for k", "draw = ImageDraw.Draw(im) root = self.getRoot() root.draw(self, draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\" #", "tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return (have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent = node.getParent() curr_entries =", "k) * 100) # y1 = int(100 + random.randint(0, k) * 100) #", "n and do not increase domains and # high inter-group overlap means maximal", "100, 0) point6 = (80, 100, 0) point7 = (90, 100, 0) point8", "def addEntry(self, entry): curr_child = entry.getChild() (self.child_to_entry_dict)[curr_child] = entry def removeEntry(self, entry): curr_child", "comp_2a = upper_left[j] comp_2b = lower_right[j] term2 = comp_2b - comp_2a term =", "combined_area_values = [x[0] for x in combined_area_tagged_next_candidate_distributions] min_combined_area_value = min(combined_area_values) matching_combined_area_tagged_next_candidate_distributions = [x", "[x.getMBR() for x in entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area =", "+ \")\" return overall_str def toEntriesArePresentString(self): root = self.getRootEntry().getChild() return self.toEntriesArePresentStringHelper(root) def toEntriesArePresentStringHelper(self,", "1: # shorten tree entries = root.getEntries() chosen_entry = entries[0] chosen_child = chosen_entry.getChild()", "if we increase n and do not increase domains and # high inter-group", "dimension = None result1 
= self.xtreeTopologicalSplit(node, entry) entry_collection1, entry_collection2, dimension = result1 mbr_collection1", "True: return True else: entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr)", "tree.delete(entry7) tree.delete(entry8) \"\"\" print tree.toString() tree2 = RTree() import random entries = []", "internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap) entry = item node = entry.getChild() mbr =", "True: priority = -1 * curr_mbr_area item = curr_entry pair = (priority,item) heapq.heappush(heap,pair)", "node.setEntry(entry) entries.append(entry) \"\"\" for i in xrange(10): upper_left = (20, 20) lower_right =", "entries): if node.isSuperNode() == False: node.setToSuperNode(True) # questionable if this is really necessary", "self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) != 1: candidate_entries = self.resolveEnlargementTie(candidate_entries, entry) \"\"\" else: candidate_entries", "RawMBR((50, 100, 0), (110, 200, 100), None) print tree.doContainmentQuery(curr_mbr3) # raise Exception() print", "getIDValue(self): return self.id_value class Point: def __init__(self, vec, id_value): self.vec = vec self.id_value", "mbr_list = [x.getMBR() in node.getEntries()] curr_x_tree = RTree() overlap_area_sum = sum([x.getArea() for x", "and close-ancestor finding; the assumption is necessary # to make strong running time", "keep_nodes: Q.append(keep_node) # only makes sense to speak of modifying mbr if we", "heap = [] # entry_pq.push(root_entry, priority) item = root_entry pair = (priority,item) heapq.heappush(heap,pair)", "= RTreeNode(None, [], True) entry2 = RTreeEntry(mbr2, node2) node2.setEntry(entry2) tree.insert(entry2) mbr3 = RawMBR(point3,", "# which for a well-formed r-tree, takes O(n * log(n)) time; # these", "= (50, 100, 0) point4 = (60, 100, 0) point5 = (70, 100,", "None if leaf_node == None: raise Exception(\"expected a node to be found 
for", "= 0 if root_mbr_is_contained == True else 1 second_priority_component = (-1 if root_mbr_is_contained", "node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception() if node.getNumChildren() <= 1: # raise Exception()", "# n = 1000 # 1.1649 seconds (~2.72x slower for 10x growth; expected", "in entry_collection1] mbr_collection2 = [x.getMBR() for x in entry_collection2] # this line presumes", "elif len(internal_node_stack_deque) != 0: item = internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap) entry =", "16 # these numbers are for upper-left's in (100, 10100) and # lower-right's", "= True is_root_node = node == self.getRootEntry().getChild() if is_root_node == True: have_node_str =", "= RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea() multi_overlap_ratio = overlap_area_sum / (1.0", "if ended_with_split2 == True: e, ee = resulting_entries_from_split l = e.getChild() ll =", "4 == 2: upper_left = (60, 60) lower_right = (80, 80) elif i", "MBR: def __init__(self, upper_left, lower_right): self.upper_left = upper_left self.lower_right = lower_right def isRaw(self):", "O(n * log(n)) time, # where n is number of actual rectangles or", "an r-tree and O(n * log(n)) time at worst; # and to add", "(self.getParent() == None and self.getNumChildren() == 0) or (self.getNumChildren() != 0 and False", "surface_area = 0 for i in xrange(self.getDimension()): comp_1a = upper_left[i] comp_1b = lower_right[i]", "getComponent(self, d): return self.getVec()[d] def getIDValue(self): return self.id_value import string class RTree: def", "tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node, entry): curr_node = node E_overall =", "S_comp_dict[i] = S_comp_value d_S_pairs = S_comp_dict.items() min_S_value = min([x[1] for x in d_S_pairs])", "# for entry in entries[0 : 4]: # for entry in entries[0 :", 
"entry in entries: child = entry.getChild() child_str = self.toStringHelper(child) curr_str = child_str overall_str_list.append(curr_str)", "pair = heap[0] result = pair return result def toList(self): pair_list = self.heap", "= curr_entry.getMBR() curr_mbr_is_actual = curr_mbr.isRaw() curr_mbr_is_contained = reference_mbr.doesEnclose(curr_mbr) curr_mbr_area = curr_mbr.getArea() first_priority_component =", "self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doEnclosureQueryHelper(self, mbr, entry, partial_result): if entry.getMBR().isRaw() ==", "mbr_b.getLowerRight() do_overlap = True # assume that rectangles never have negative area for", "second_entry if have_resulting_second_entry_from_split == True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children =", "100), point6) node6 = RTreeNode(None, [], True) entry6 = RTreeEntry(mbr6, node6) node6.setEntry(entry6) tree.insert(entry6)", "return [self] def clone(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight() contained_item = self.getContainedItem()", "mbr), x) for x in mbr_list] enlargement_values = [x[0] for x in tagged_enlargement_values]", "200, 100), None) print tree.doContainmentQuery(curr_mbr3) # raise Exception() print tree.doOverlapQuery(curr_mbr2) # raise Exception()", "= self.getRootEntry() root_node = root_entry.getChild() root_mbr = root_entry.getMBR() root_mbr_is_actual = root_mbr.isRaw() root_mbr_is_contained =", "partial_result): if entry.getMBR().isRaw() == True: # print mbr.toString(), entry.getMBR().toString() if mbr.doesEnclose(entry.getMBR()) == True:", "True] start_rectangle_entries = [x.getEntry() for x in start_rectangle_nodes] start_rectangle_to_close_ancestor_entries_dict = {} for start_rectangle_entry", "378, 1963)), \\ ((803, 1054, 307), (1776, 1597, 501)), \\ ((803, 233, 521),", "= self.findLeafHelper(entry, next_entry) if result == True: return result return False def 
delete(self,", "# z2 = int(z1 + random.random() * 100) x = random.randint(0, 10000) y", "a super-node if node.isSuperNode() == True: # raise Exception() return (False, None, None,", "m) result = RTree.rstarChooseSplitIndex(E_overall, axis, M, m) entry_group1, entry_group2 = result parent =", "x[0]], [y.getMBR() for y in x[1]]), x) for x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions =", "= 1 NO_SPLIT = 2 def xtreeInsertHelper(self, entry, node): split_status = None next_mbr", "return node else: return node.getParent() else: entries = node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries, entry)", "= [entry.getMBR()] for mbr in mbr_list: upper_left = mbr.getUpperLeft() lower_right = mbr.getLowerRight() x1,", "@staticmethod def rstarChooseSplitAxis(entries, M, m): result = RTree.rstarGenDistributions(entries, M, m) S_comp_dict = {}", "= -1 * root_mbr_area # entry_pq = PriorityQueue() heap = [] # entry_pq.push(root_entry,", "m and x[1] <= M and x[1] >= m] for i in xrange(entries[0].getMBR().getDimension()):", "sense to speak of modifying mbr if we plan on keeping the node", "entry.getChild() (self.child_to_entry_dict).pop(curr_child) def getMinimumNumEntriesPerNode(self): return self.m def getMaximumNumEntriesPerNode(self): return self.M def isFull(self): return", "!= None: curr_entries = node.getEntries() entry = node.getParent().retrieveEntryForChild(node) children = [x.getChild() for x", "mbr_list = [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry =", "when size decreases to or below M # updated on 2016-11-06 to add", "self.getRootEntry().getChild() == node: next_root_entry = RTreeEntry(next_mbr, None) next_root = RTreeNode(None, [entry1, entry2], None,", "pass def condenseTree(self, leaf_node): Q = [] self.condenseTreeHelper(leaf_node, Q) # Q is in", "next_x1 perimeter_y = next_y1 + radius 
image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\")", "(self.child_to_entry_dict).values() def getEntryForChild(self, child_node): return (self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self): return", "have_resulting_second_entry_from_split == True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild() for", "* 0.2) / 2 next_x = multiplier * x next_y = multiplier *", "sum([x.getArea() for x in mbr_list]) for curr_mbr in mbr_list: next_mbr = RawMBR(curr_mbr.getUpperLeft(), curr_mbr.getLowerRight(),", "RTreeNode(None, [entry1, entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass return (node1, node2, entry1,", "node.isNonTraditionalLeafNode() == True: node.addEntry(entry) entry.getChild().setParent(node) \"\"\" elif node.getNumChildren() == 0: pass return (RTree.NO_SPLIT,", "a well-formed r-tree, this takes O(n * log(n)) time, # where n is", "/ denominator)) # for n = 20000 # k = int(round((14500 * math.log(14500,", "curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) partner_entry = None if have_resulting_second_entry_from_split == True: first_entry,", "* math.log(14500, 2)) ** (1 / 3.0) / denominator)) # for n =", "[], True) entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 = RawMBR(point2, (110, 200,", "node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()] # overall_str_list = [] if node.getNumChildren() == 0", "Exception() # print \"decision point\" \"\"\" if node.isSuperNode() == True: # print \"supernode", "True: return result return False def delete(self, entry): # print \"hello\" did_find_leaf =", "= 16 # these numbers are for upper-left's in 
(100, 10100) and #", "in x[0]], [y.getMBR() for y in x[1]]), x) for x in candidate_distributions] mbr_pair_tagged_candidate_distributions", "b \"left\" comp_b1 = min(upper_left_b[i], lower_right_b[i]) # b \"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i])", "= Image.new(\"RGB\", (512, 512), \"white\") \"\"\" im = Image.new(\"RGB\", (768, 768), \"white\") draw", "= (768 * 0.2) / 2 offset = (1536 * 0.2) / 2", "in node.getChildren(): self.getNodesHelper(curr_node, partial_result) def getNodesForNode(self, node): node_list = [] self.getNodesHelper(node, node_list) return", "return start_rectangle_to_close_ancestor_entries_dict def draw(self): # im = Image.new(\"RGB\", (512, 512), \"white\") \"\"\" im", "using order of high-level to low-level # Q = list(set(Q)) Q.reverse() for curr_node", "if curr_entry.getMBR().isRaw() == True: if entry == curr_entry: return True else: return False", "return self.findLeafHelper(entry, self.getRootEntry()) def findLeafHelper(self, entry, curr_entry): \"\"\" if node.isLeafNode() == False: curr_mbr", "True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) ==", "for curr_entry in entry_group1: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1)", "area1 + area2 - overlap_area ovelap_ratio = None if union_area == 0: if", "(have_resulting_second_entry_from_split, resulting_entries_from_split) else: parent = node.getParent() curr_entries = node.getEntries() entry = None \"\"\"", "overlap-minimal split # updated on 2016-11-03 to re-structure and modify adjustTree(); # stop", "950, 700)), \\ ((297, 196, 750), (1085, 718, 1259)), \\ ((808, 926, 151),", "getUpperLeft(self): return self.upper_left def getLowerRight(self): return self.lower_right def getIDValue(self): return self.id_value class Point:", "list(set(curr_node.getEntries() + [entry])) return 
self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self, node, E_overall, entry): #", "= \"-\" if (node.getParent() == None or (node.getParent() != None and node in", "never have negative area for i in xrange(mbr_a.getDimension()): # a \"left\" comp_a1 =", "resolveEnlargementTie(self, entries, entry): mbr = entry.getMBR() tagged_mbr_list = [] for curr_entry in entries:", "a super-node # updated on 2016-08-23 to fix traditional/non-traditional isLeafNode() distinction # updated", "# these times involve n, which is number of actual rectangles # or", "[x for x in tree.getNodes() if x.isSuperNode() == True], tree.getRootEntry().getChild() # tree2.draw() print", "entry in entries: child = entry.getChild() child_str = self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str)", "entries = entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr,", "= self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result == True: return True return False # returns", "= RTreeEntry(next_mbr, None) next_root = RTreeNode(None, [entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root)", "offset perimeter_y = next_y + offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children", "raise Exception() print tree.toString() # tree.delete(entry1) print tree.toString() # tree.delete(entry8) # tree.insert(entry1) \"\"\"", "= [x.getMBR() for x in entry_group2] curr_overall_mbr1 = CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for", "for pypy with m = 2 and M = 4 # n =", "(1.0 * union_area) if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() == True: \"\"\"", "33x slower) # n = 5500 # 23.899 seconds (~55.96x slower for 55x", "for 
x in low_comp_distributions] low_mbr_pairs = [(CompositeMBR.makeMBR(x[0]), CompositeMBR.makeMBR(x[1])) for x in low_constituent_mbr_list_pairs] low_margin_values", "(first_priority_component, second_priority_component) if curr_mbr.isRaw() == True: priority = -1 * curr_mbr_area item =", "(parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode(): parent.addEntry(partner_entry) partner_entry.getChild().setParent(parent) return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else:", "and False not in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) return is_non_traditional_leaf_node", "split just in case # print \"split\" return (RTree.SPLIT, [node]) elif node.isNonTraditionalLeafNode() ==", "+ 1) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" + string.join(overall_str_list, \" \")", "chosen_entry.getChild() self.setRoot(chosen_child) \"\"\" # if RN is a leaf node # search all", "(~2.72x slower for 10x growth; expected 33x slower) # n = 5500 #", "margin = surface_area return margin def toString(self): upper_left = self.getUpperLeft() lower_right = self.getLowerRight()", "i % 4 == 3: upper_left = (100, 100) lower_right = (120, 120)", "if entry.getMBR().doesEnclose(mbr) == True: return True else: entries = entry.getChild().getEntries() for curr_entry in", "\"\"\" def isTraditionalLeafNode(self): is_traditional_leaf_node = self.getNumEntries() == 0 return is_traditional_leaf_node \"\"\" def isLeafNode(self):", "nodes, prune using enclosure/containment # w.r.t. 
reference rectangle, add children to priority queue,", "100), point7) node7 = RTreeNode(None, [], True) entry7 = RTreeEntry(mbr7, node7) node7.setEntry(entry7) tree.insert(entry7)", "= upper_left self.lower_right = lower_right def isRaw(self): return False def isComposite(self): return False", "= point result_mbr = RawMBR(upper_left, lower_right, point) return result_mbr def getContainedItem(self): return self.contained_item", "1149)), \\ ((800, 709, 871), (1390, 1402, 1548)), \\ ((433, 499, 483), (1300,", "602), (910, 1248, 1035)), \\ ((920, 974, 724), (1802, 1524, 1378)), \\ ((911,", "True overall_str_list = None if have_node_str == True: curr_leaf_status = \"-\" if node.isLeafNode()", "x2 - x1 return margin if self.getDimension() == 2: x1, y1 = upper_left", "100) # y2 = int(y1 + random.random() * 100) # z2 = int(z1", "overall_str_list = [] for entry in entries: child = entry.getChild() child_str = self.toNumChildrenStringHelper(child)", "self.getRootEntry().getChild(): return node else: return node.getParent() else: entries = node.getEntries() candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(entries,", "in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in close_descendant_entries: start_rectangle_to_close_ancestor_entries_dict[close_descendant_entry].append(start_rectangle_entry) return start_rectangle_to_close_ancestor_entries_dict def", "* multiplier + offset next_x2 = x2 * multiplier + offset next_y2 =", "(self.child_to_entry_dict)[child_node] def getChildren(self): return (self.child_to_entry_dict).keys() def getNumEntries(self): return len(self.child_to_entry_dict) def getNumChildren(self): return self.getNumEntries()", "return self.getNumEntries() def setParent(self, node): self.parent = node def isNonTraditionalLeafNode(self): is_non_traditional_leaf_node = (self.getParent()", "# image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, 
next_y2)) image.write(\"tree.png\") def main(): point1 = (30, 100, 0)", "mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions = [(x[0][0].getArea() +", "not in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) is_leaf_node = self.getNumChildren() ==", "for curr_entry in entries: base_mbr = curr_entry.getMBR() curr_mbr = MBR.getEnlargedMBR(base_mbr, mbr) tagged_mbr_list.append((curr_mbr, curr_entry))", "True and lower_right_matches == True return result class CompositeMBR(MBR): def __init__(self, upper_left, lower_right,", "if x.getEntry().getMBR().isRaw() == True] for keep_node in keep_nodes: Q.append(keep_node) # only makes sense", "entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) == True: partial_result.append(entry) else: entries = entry.getChild().getEntries() for", "left_value2 and right_value1 >= right_value2 if component_does_enclose == False: does_enclose = False break", "= self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self, node): if node == None: return \"\"", "heapq.heappop(heap) elif len(internal_node_stack_deque) != 0: item = internal_node_stack_deque.popleft() # (priority,item) = heapq.heappop(heap) entry", "((945, 260, 1091), (1932, 332, 1133)), \\ ((262, 221, 872), (500, 279, 1521)),", "does_enclose = True for i in xrange(dimension): left_value1 = self.getUpperLeft()[i] left_value2 = mbr.getUpperLeft()[i]", "we have parent set correctly for a leaf, # which is not the", "mbr_group1 = [x.getMBR() for x in entry_group1] mbr_group2 = [x.getMBR() for x in", "our tree # has entry-aware nodes; made bug fix for adjustTree(); # fixed", "this entry, this should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node) # root = self.getRootEntry().getChild() \"\"\"", "= root_entry def 
hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild() curr_node = root depth = 0", "area1 = mbr1.getArea() area2 = mbr2.getArea() union_area = area1 + area2 - overlap_area", "i % 4 == 2: upper_left = (60, 60) lower_right = (80, 80)", "tree.getRootEntry().getChild() mbr1 = RawMBR(point1, (110, 200, 100), point1) node1 = RTreeNode(None, [], True)", "self.is_supernode def setToSuperNode(self, is_supernode): self.is_supernode = is_supernode def getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self,", "0) def toDepthStringHelper(self, node, depth): if node == None: return \"\" entries =", "we have cliques # note that we don't necessarily need PythonMagick # note", "lower_right_point, result_mbr_list) return mbr @staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea() enlarged_mbr =", "enlarged_mbr.getArea() area_change = enlarged_mbr_area - base_mbr_area return area_change @staticmethod def doOverlap(mbr_a, mbr_b, without_borders", "= [x for x in tagged_area_values if x[0] == min_area] candidate_entries = [x[1]", "else: return (True, entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self, node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode()", "entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node, entry): curr_node = node E_overall", "depth, curr_depth): if node == None: return elif node.isLeafNode() == True: if depth", "overall_str_list = [] for entry in entries: child = entry.getChild() child_str = self.toLeafStatusStringHelper(child)", "m) candidate_distributions = None candidate_distributions = result[axis][0] + result[axis][1] mbr_list_pair_tagged_candidate_distributions = [(([y.getMBR() for", "* log(n)) time at worst; # assumes that rectangles are distinct # return", "len(entries) == 0: parent = entry.getChild().getParent() mbr = entry.getMBR() 
location = Point.toPoint(mbr) x,", "node.getNumEntries() != 0 else str(depth) overall_str_list = [curr_depth] else: overall_str_list = [] for", "area2 - overlap_area ovelap_ratio = None if union_area == 0: if mbr1.isEqualTo(mbr2) ==", "def setToSuperNode(self, is_supernode): self.is_supernode = is_supernode def getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim):", "node2.setParent(parent) else: next_root = RTreeNode(None, [entry1, entry2], False) self.getRootEntry().setChild(next_root) next_root.setEntry(self.getRootEntry()) node1.setParent(next_root) node2.setParent(next_root) pass", "depth = 0 while curr_node.isLeafNode() == False: curr_node = curr_node.getChildren()[0] depth = depth", "self.getRootEntry().getChild()) def chooseLeafHelper(self, entry, node): if node.isLeafNode() == True: if node == self.getRootEntry().getChild():", "= split_history_root_dimension self.is_supernode = is_supernode self.entry = entry def getEntry(self): return self.entry def", "l, [e, ee], True) ended_with_split2, resulting_entries_from_split = adjust_result if ended_with_split2 == True: e,", "2) return distance class RTreeNode: def __init__(self, parent, entries, is_leaf, entry = None,", "<= M and x >= m and (len(entries) - x) <= M and", "to add all-start-rectangles close-ancestor finding, # which for a well-formed r-tree, takes O(n", "for x in entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) \"\"\" def rstarSplitNode(self, node, entry): curr_node", "else: return (False, []) \"\"\" # assume item is in tree # returns", "next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False: # if we", "for i in xrange(mbr_a.getDimension()): # a \"left\" comp_a1 = min(upper_left_a[i], lower_right_a[i]) # a", "on occasion, # if containment query for conflict x-tree returns entries matching_entries =", "min-pq class PriorityQueue: def 
__init__(self): self.heap = [] def push(self, item, priority): pair", "== None and self.getNumChildren() == 0) or (self.getNumChildren() != 0 and False not", "conflict_x_tree.delete(matching_entry) # if node is a leaf node, it has an actual rectangle", "1 + k for k in range(1, M - 2 * m +", "= (90, 100, 0) point8 = (110, 100, 0) curr_mbr1 = RawMBR((100, 100,", "overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list = [] if", "enlarged_mbr_area = enlarged_mbr.getArea() area_change = enlarged_mbr_area - base_mbr_area return area_change @staticmethod def doOverlap(mbr_a,", "mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b = mbr_b.getLowerRight() dimension = mbr_a.getDimension()", "y in x[1]]), x) for x in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1])", "partner_entry) l, ll, e, ee = split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True)", "= base_mbr.getMBRList() + [mbr] mbr = CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return mbr @staticmethod def", "x[1].getMarginValue() for x in upper_mbr_pairs] upper_margin_value_sum = sum(upper_margin_values) S_comp_value += upper_margin_value_sum S_comp_dict[i] =", "really necessary for entry in entries: curr_node = entry.getChild() node.addEntry(entry) # needed this", "# n = 20000 # 230.0411 seconds (~538x slower for 200x growth; expected", "entry = RTreeEntry(mbr, node) node.setEntry(entry) entries.append(entry) for i in xrange(1000): upper_left = (0,", "return False return True def toNumChildrenString(self): root = self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root) def toNumChildrenStringHelper(self,", "node.getChildren() have_node_str = True overall_str_list = None if have_node_str == True: curr_depth 
=", "can temporarily look like leaf nodes # keep_nodes = [x for x in", "= [] self.doOverlapQueryHelper(mbr, self.getRootEntry(), partial_result, without_borders) return partial_result def doOverlapQueryHelper(self, mbr, entry, partial_result,", "if self.getRootEntry().getChild().getNumChildren() == 0: return [] reference_mbr = reference_entry.getMBR() root_entry = self.getRootEntry() root_node", "1089, 594)), \\ ((294, 238, 1036), (785, 378, 1963)), \\ ((803, 1054, 307),", "entry = entry_pq.pop() item = None if len(heap) != 0: (priority,item) = heapq.heappop(heap)", "- comp_1a for j in xrange(i + 1, self.getDimension()): comp_2a = upper_left[j] comp_2b", "offset next_y1 = y1 * multiplier + offset next_x2 = x2 * multiplier", "= int(y1 + random.random() * 100) # z2 = int(z1 + random.random() *", "CompositeMBR.makeMBR(mbr_group1) curr_overall_mbr2 = CompositeMBR.makeMBR(mbr_group2) for curr_entry in entry_group1: next_curr_node = curr_entry.getChild() if curr_entry", "== 3: upper_left = (100, 100) lower_right = (120, 120) \"\"\" denominator =", "mbr = entry.getMBR() tagged_enlargement_values = [(MBR.getAreaEnlargement(x, mbr), x) for x in mbr_list] enlargement_values", "= entry.getChild().getEntries() for curr_entry in entries: if MBR.doOverlap(curr_entry.getMBR(), mbr) == True: self.doContainmentQueryHelper(mbr, curr_entry,", "True) entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4, (110, 200, 100),", "for xtreeInsert(); # have supernode demotion when size decreases to or below M", "entry.getMBR().isRaw() == True: if MBR.doOverlap(entry.getMBR(), mbr, without_borders) == True: partial_result.append(entry) else: entries =", "entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self, node, entry): m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode()", "elif node.isLeafNode() == True: if depth != curr_depth: return False else: 
return True", "resulting_entries_from_split = [e, ee] next_root = RTreeNode(None, resulting_entries_from_split, False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root)", "priority, internal_entry = priority_tagged_internal_entry item = internal_entry internal_node_stack_deque.appendleft(item) # print \"conflict x-tree:\", conflict_x_tree.toString()", "entry = node.getParent().retrieveEntryForChild(node) \"\"\" entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in", "return self.root_entry def setRootEntry(self, root_entry): self.root_entry = root_entry def hasConsistentNonTraditionalLeafDepthValues(self): root = self.getRootEntry().getChild()", "M, m) entry_group1, entry_group2 = result parent = curr_node.getParent() \"\"\" if parent !=", "node7) node7.setEntry(entry7) tree.insert(entry7) mbr8 = RawMBR(point8, (110, 200, 100), point8) node8 = RTreeNode(None,", "def getSplitHistoryRootDimension(self): return self.split_history_root_dimension def setSplitHistoryRootDimension(self, dim): self.split_history_root_dimension = dim def getParent(self): return", "= curr_node.getChildren()[0] depth = depth + 1 return self.hasConsistentNonTraditionalLeafDepthValuesHelper(root, depth, 0) def hasConsistentNonTraditionalLeafDepthValuesHelper(self,", "calculation # note that we assume rectangles are unique for close-descendant # and", "print result print len(result) for entry_to_close_ancestor_entry_list_pair in result.items(): entry, close_ancestor_entry_list = entry_to_close_ancestor_entry_list_pair print", "enclosure/containment # w.r.t. 
reference rectangle, add children to priority queue, # ignore if", "= lower_right multiplier = 1 / (1.0 * 6.5) * 0.8 offset =", "= \"-\" if node.getNumEntries() != 0 else str(depth) overall_str_list = [curr_depth] else: overall_str_list", "def getComponent(self, d): return self.getVec()[d] def getIDValue(self): return self.id_value import string class RTree:", "entry) def rstarSplitNodeHelper(self, node, E_overall, entry): # prev_leaf_status = node.isLeafNode() prev_leaf_status = None", "(10, 10) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [], True) entry", "node2 = RTreeNode(parent, entry_collection2, None, entry2) entry2.setChild(node2) for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for", "a leaf node # search all entries of RN to find E.mbr #", "occasionally # note that M of two works import sys # import PythonMagick", "is found # remove E from L # call algorithm condenseTree(L) # if", "if self.getRootEntry().getChild().getNumChildren() == 0: root_node = RTreeNode(None, [], True) root_mbr = CompositeMBR(None, None,", "(9, 10, 10), (3, 10, 10), (1, 10, 10), (3, 10, 10)] #", "tree.toString() # tree.delete(entry8) # tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2) tree.delete(entry3) tree.delete(entry4) tree.delete(entry5) tree.delete(entry6) tree.delete(entry7)", "overall_str def toDepthString(self): root = self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node, depth):", "first_entry, second_entry = resulting_entries_from_split partner_entry = second_entry if have_resulting_second_entry_from_split == True and is_first_call_after_first_pass", "cover E.mbr # follow the corresponding subtrees unti lthe leaf L that contains", "upper_left self.lower_right = lower_right self.id_value = id_value def getUpperLeft(self): return self.upper_left def getLowerRight(self):", "tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) 
self.condenseTreeHelper(node.getParent(), Q) return # not tested # returns entries", "2 x1 = 0 y1 = 0 x2 = 47 y2 = 60", "+ [entry])) return self.rstarSplitNodeHelper(node, E_overall, entry) def rstarSplitNodeHelper(self, node, E_overall, entry): # prev_leaf_status", "lower_right = (40, 40) mbr = RawMBR(upper_left, lower_right, None) node = RTreeNode(None, [],", "chooseEntriesWithMinimalAreaEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)): curr_entry = entries[i]", "d): return self.getVec()[d] def getIDValue(self): return self.id_value import string class RTree: def __init__(self):", "(priority,item) heapq.heappush(heap,pair) elif curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr) ==", "== 0 return is_traditional_leaf_node \"\"\" def isLeafNode(self): # is_leaf_node = (self.getParent() == None", "upper_left = (60, 60) lower_right = (80, 80) elif i % 4 ==", "in entries[0 : 4]: # print \"supernodes:\", [x for x in tree.getNodes() if", "= upper_left[0] x2 = lower_right[0] margin = x2 - x1 return margin if", "is_equal class RawMBR(MBR): def __init__(self, upper_left, lower_right, contained_item): MBR.__init__(self, upper_left, lower_right) self.contained_item =", "= 2 and M = 4 # n = 1,000 works in 2.996", "if curr_entry.getMBR().doesEnclose(mbr) == True: self.doEnclosureQueryHelper(mbr, curr_entry, partial_result) def doEnclosureQueryWithEarlyStopping(self, mbr): result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr,", "node.isLeafNode() == False: if have_resulting_second_entry_from_split == True: if (parent.getNumChildren() + 1) <= parent.getMaximumNumEntriesPerNode():", "= 2 perimeter_x = next_x + offset perimeter_y = next_y + offset +", "curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: # raise Exception() # print \"decision", "== False: return False return True def toNumChildrenString(self): root = 
self.getRootEntry().getChild() return self.toNumChildrenStringHelper(root)", ": window_left_sizes[j]], upper_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] curr_tuple = (low_distributions, upper_distributions)", "Q is in order of low-level to high-level; # wish to insert using", "term = 2 * term1 * term2 surface_area += term margin = surface_area", "print \"no split\" return (RTree.NO_SPLIT, [node]) if node.isLeafNode() == True: # split just", "= [x.getChild() for x in partner_entries] partner_mbr_list = [x.getMBR() for x in partner_entries]", "reference mbr # and associated mbr is not contained within reference mbr continue", "next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() == False: # if we made it this", "= RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) conflict_x_tree.insert(next_entry) elif node.isLeafNode() ==", "else: return node.getParent() else: entries = node.getEntries() candidate_entries = None # if node.isLeafNode()", "MBR.findOverlapArea(mbr1, mbr2) area1 = mbr1.getArea() area2 = mbr2.getArea() union_area = area1 + area2", "None if have_node_str == True: curr_leaf_status = str(node.getNumChildren()) overall_str_list = [curr_leaf_status] else: overall_str_list", "(True, entry_collection3, entry_collection4, dimension) else: return (True, entry_collection1, entry_collection2, dimension) def xtreeTopologicalSplit(self, node,", "= [x.getUpperLeft() for x in component_mbr_list] lower_right_points = [x.getLowerRight() for x in component_mbr_list]", "if node.isUnderfull() == True: # print \"underfull\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) # don't", "returns entries # does intersection query def doOverlapQuery(self, mbr, without_borders = False): partial_result", "= [x.getLowerRight() for x in mbr_list] points = upper_left_points + lower_right_points min_components =", 
"[(([y.getMBR() for y in x[0]], [y.getMBR() for y in x[1]]), x) for x", "False: overall_str_list = [node.getEntry().getMBR().toString()] # overall_str_list = [node.getEntry().getMBR().toString(), str(node)] else: overall_str_list = []", "self.vec def getComponent(self, d): return self.getVec()[d] def getIDValue(self): return self.id_value import string class", "mbr.getUpperLeft() def getVec(self): return self.vec def getComponent(self, d): return self.getVec()[d] def getIDValue(self): return", "self.getNumEntries() >= self.getMaximumNumEntriesPerNode() def isUnderfull(self): return self.getNumEntries() < self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node): return", "if depth != 0: pass color_choice = depth % 3 color = None", "x in partner_entries] partner_tight_overall_mbr = CompositeMBR.makeMBR(partner_mbr_list) partner_entry.setMBR(partner_tight_overall_mbr) if have_resulting_second_entry_from_split == True: parent.removeEntry(entry) if", "MBR.doOverlap(curr_entry.getMBR(), entry.getMBR()) == True: result = self.findLeafHelper(entry, next_entry) if result == True: return", "# if we increase n and do not increase domains and # high", "the r-tree # is not acyclic and we have cliques # note that", "return node_list \"\"\" def getUnionArea(self): pass \"\"\" # takes O(log(n)) time on average", "\\ ((332, 886, 493), (822, 1305, 1149)), \\ ((800, 709, 871), (1390, 1402,", "def isComposite(self): return True @staticmethod def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for x in", "node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode() M = self.getRootEntry().getChild().getMaximumNumEntriesPerNode() axis = RTree.rstarChooseSplitAxis(E_overall, M, m) result", "if the root has only one child (and it is not a leaf)", "which we do do occasionally # note that M of two works import", "# tree.delete(entry1) print tree.toString() # tree.delete(entry8) # tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2) 
tree.delete(entry3) tree.delete(entry4)", "= mbr1.getArea() area2 = mbr2.getArea() union_area = area1 + area2 - overlap_area ovelap_ratio", "return overall_str def chooseEntriesWithMinimalOverlapEnlargement(self, entries, entry): mbr_to_entry_dict = {} for i in range(len(entries)):", "ll = ee.getChild() if (self.getRootEntry().getChild().getNumEntries() + 1) <= self.getRootEntry().getChild().getMaximumNumEntriesPerNode(): self.getRootEntry().getChild().addEntry(ee) ll.setParent(self.getRootEntry().getChild()) else: split_result", "359, 290), (579, 950, 700)), \\ ((297, 196, 750), (1085, 718, 1259)), \\", "4 == 1: upper_left = (20, 20) lower_right = (40, 40) elif i", "= curr_entry.getMBR() mbr = entry.getMBR() next_mbr = MBR.getEnlargedMBR(curr_mbr, mbr) node.getEntry().setMBR(next_mbr) # this parent-setting", "has zero entries after removing this entry, this should be okay leaf_node.removeEntry(entry) self.condenseTree(leaf_node)", "node else: return node.getParent() else: entries = node.getEntries() candidate_entries = None # if", "its only child pass def condenseTree(self, leaf_node): Q = [] self.condenseTreeHelper(leaf_node, Q) #", "if curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr) == False: continue # item = curr_entry", "= (70, 100, 0) point6 = (80, 100, 0) point7 = (90, 100,", "close-descendant # and close-ancestor finding; the assumption is necessary # to make strong", "(1390, 1402, 1548)), \\ ((433, 499, 483), (1300, 1330, 1055))] \"\"\" # n", "= [] for start_rectangle_entry in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry in close_descendant_entries:", "root = self.getRootEntry().getChild() \"\"\" if root.getNumChildren() == 1: # shorten tree entries =", "[x.getLowerRight() for x in component_mbr_list] points = upper_left_points + lower_right_points min_components = []", "= RTreeNode(parent, entry_collection2, None, entry2) 
entry2.setChild(node2) for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry", "= entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" +", "do_overlap = True if without_borders == True: do_overlap = do_overlap and comp_a1 <", "0), (100, 100, 0), (100, 100, 0)) curr_mbr2 = RawMBR((50, 100, 0), (50,", "= (100 * math.log(100, 2)) ** (1 / 3.0) k = 1 #", "True) entry5 = RTreeEntry(mbr5, node5) node5.setEntry(entry5) tree.insert(entry5) mbr6 = RawMBR(point6, (110, 200, 100),", "return (node1, node2, entry1, entry2) @staticmethod def rstarPreadjustTree(self, leaf_node): node = leaf_node parent", "l, ll, e, ee = split_result adjust_result = RTree.rstarAdjustTree(self, l, [e, ee], True)", "or leaves; # assumes that rectangles are distinct def getAllRectangleCloseAncestors(self): start_rectangle_nodes = [x", "next_node = RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area =", "# Q is in order of low-level to high-level; # wish to insert", "l, ll, e, ee = split_result return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [e, ee], True) else:", "== min_combined_area_value] next_next_candidates = [x[1] for x in matching_combined_area_tagged_next_candidate_distributions] chosen_distribution_pair = next_next_candidates[0] return", "leaf, # which is not the case when we initially insert parent =", "curr_tuple = (low_distributions, upper_distributions) result_list.append(curr_tuple) return result_list @staticmethod def rstarChooseSplitAxis(entries, M, m): result", "return result def doEnclosureQueryWithEarlyStoppingHelper(self, mbr, entry): if entry.getMBR().isRaw() == True: if entry.getMBR().doesEnclose(mbr) ==", "else [node.getEntry().getMBR().toString()] # overall_str_list = [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString(), 
str(node)]", "result = tree.getRectangleCloseDescendants(entry8) print result result = tree.getAllRectangleCloseAncestors() print result print len(result) for", "curr_node.getParent() \"\"\" if parent != None and (node in parent.getChildren()): pass \"\"\" node1", "= adjust_result if ended_with_split2 == True: e, ee = resulting_entries_from_split l = e.getChild()", "in entries: child = entry.getChild() child_str = self.toLeafStatusStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str", "if node == None: return \"\" entries = node.getEntries() children = node.getChildren() have_node_str", "high inter-group overlap means maximal disjointedness # is not going to be good", "\"\"\" chosen_entry = candidate_entries[0] chosen_child = chosen_entry.getChild() return self.rstarChooseLeafHelper(entry, chosen_child) def insert(self, entry):", "False, self.getRootEntry()) l.setParent(next_root) ll.setParent(next_root) self.getRootEntry().setChild(next_root) else: pass MAX_OVERLAP_RATIO = 0.2 def xtreeSplitNode(self, node,", "== True: # could have a safe path to a leaf where the", "RawMBR(point7, (110, 200, 100), point7) node7 = RTreeNode(None, [], True) entry7 = RTreeEntry(mbr7,", "for x in tagged_enlargement_values] min_enlargement_value = min(enlargement_values) candidate_tagged_enlargement_values = [x for x in", "* multiplier + offset next_y2 = y2 * multiplier + offset \"\"\" #", "= CompositeMBR(upper_left_point, lower_right_point, result_mbr_list) return mbr @staticmethod def getAreaEnlargement(base_mbr, mbr): base_mbr_area = base_mbr.getArea()", "item, priority): pair = (priority,item) heapq.heappush(self.heap,pair) def pop(self): (priority,item) = heapq.heappop(self.heap) return item", "4 == 3: upper_left = (100, 100) lower_right = (120, 120) \"\"\" denominator", "True], tree.getRootEntry().getChild() # tree2.draw() print len(tree2.getNodes()) import time time1 = time.time() result =", "# prev_leaf_status = node.isLeafNode() 
prev_leaf_status = None curr_node = node m = self.getRootEntry().getChild().getMinimumNumEntriesPerNode()", "entry == ignore_entry: # ignore node if its entry matches the ignore entry", "y image.strokeColor(\"none\") image.fillColor(\"black\") center_x = next_x + offset center_y = next_y + offset", "it is not a leaf) # remove the root # set as new", "overlap_area_sum = sum([x.getArea() for x in mbr_list]) for curr_mbr in mbr_list: next_mbr =", "# set priority correctly and add to priority queue curr_node = curr_entry.getChild() curr_mbr", "are for upper-left's in (100, 10100) and # lower-right's in (ul_i, ul_i +", "in [x.getChild().getNumEntries() == 0 for x in self.getEntries()]) return is_non_traditional_leaf_node \"\"\" def isTraditionalLeafNode(self):", "updated on 2016-11-06 to add single-start-rectangle-based # close-descendant finding that takes O(log(n)) time", "def rstarSplitNode(self, node, entry): curr_node = node E_overall = list(set(curr_node.getEntries() + [entry])) return", "candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if", "== True] for tagged_overlapped_mbr in tagged_overlapped_mbr_list: curr_mbr, curr_entry = tagged_overlapped_mbr curr_node = curr_entry.getChild()", "% 4 == 1: upper_left = (20, 20) lower_right = (40, 40) elif", "next_result = (entry_group1, entry_group2, axis, False) return next_result def xtreeSupernodeInsert(self, node, entries): if", "tree # returns a node, which can be None if no match is", "False: continue # item = curr_entry # internal_node_stack_deque.appendleft(item) priority = (first_priority_component, second_priority_component) priority_tagged_internal_entry", "= root_mbr.isRaw() root_mbr_is_contained = reference_mbr.doesEnclose(root_mbr) root_mbr_area = root_mbr.getArea() first_priority_component = 0 if root_mbr_is_contained", "node1 = RTreeNode(None, [], 
True) entry1 = RTreeEntry(mbr1, node1) node1.setEntry(entry1) tree.insert(entry1) mbr2 =", "* math.log(20000, 2)) ** (1 / 3.0) / denominator)) # for n =", "lower_right_points min_components = [] max_components = [] for i in xrange(component_mbr_list[0].getDimension()): components =", "if multi_overlap_ratio <= RTree.MAX_OVERLAP_RATIO: node.setToSuperNode(False) elif node.isUnderfull() == True: \"\"\" if node.isUnderfull() ==", "# y2 = int(y1 + random.random() * 100) # z2 = int(z1 +", "l, ll, e, ee = split_result return tree.adjustTree(tree, l, [e, ee], True, False)", "True] keep_nodes = [x for x in self.getNodesForNode(node) if x.getEntry().getMBR().isRaw() == True] for", "if node.getNumChildren() == 0: # if node.isNonTraditionalLeafNode() == True: if node.isLeafNode() == True", "if split_status == RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node,", "else: return False else: entries = curr_entry.getChild().getEntries() for next_entry in entries: if MBR.doOverlap(curr_entry.getMBR(),", "a root node if self.getRootEntry().getChild().getNumChildren() == 0: root_node = RTreeNode(None, [], True) root_mbr", "[node]) elif split_status == RTree.SUPERNODE: pass # print \"no split\" return (RTree.NO_SPLIT, [node])", "findLeafHelper(self, entry, curr_entry): \"\"\" if node.isLeafNode() == False: curr_mbr = entry.getMBR() entries =", "upper_left, lower_right, mbr_list): MBR.__init__(self, upper_left, lower_right) self.mbr_list = mbr_list def getMBRList(self): return self.mbr_list", "# is_leaf_node = (self.getParent() == None and self.getNumChildren() == 0) or (self.getNumChildren() !=", "self.upper_left def getLowerRight(self): return self.lower_right def getIDValue(self): return self.id_value class Point: def __init__(self,", "RN is an internal node # find all entries of RN that cover", "for a well-formed r-tree, takes O(n * log(n)) time; # these times involve", "entry.getMBR() 
tagged_mbr_list = [] for curr_entry in entries: base_mbr = curr_entry.getMBR() curr_mbr =", "low_sorted_entries[window_left_sizes[j] : ]) for j in xrange(len(window_left_sizes))] upper_sorted_entries = entries[ : ] upper_sorted_entries.sort(key", "return [] reference_mbr = reference_entry.getMBR() root_entry = self.getRootEntry() root_node = root_entry.getChild() root_mbr =", "if have_resulting_second_entry_from_split == True: partner_node = partner_entry.getChild() partner_entries = partner_node.getEntries() partner_children = [x.getChild()", "is a leaf node, it has an actual rectangle # decide whether to", "* y2 + offset) if depth != 0: pass color_choice = depth %", "tree2.delete(entry) pass # print tree.toString() result = tree.getRectangleCloseDescendants(entry8) print result result = tree.getAllRectangleCloseAncestors()", "mbr.getLowerRight() x1, y1 = upper_left x2, y2 = lower_right multiplier = 1 /", "math.log(10000, 2)) ** (1 / 3.0) / denominator)) # for n = 10000", "heapq.heappush(heap,pair) priority_tagged_internal_entries.sort(key = lambda x: x[0], reverse = True) for priority_tagged_internal_entry in priority_tagged_internal_entries:", "* 100) # y2 = int(y1 + random.random() * 100) # z2 =", "+ radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) else: image.strokeColor(color) image.fillColor(\"none\") image.strokeWidth(4) image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2,", "mbr = entry.getMBR() tagged_enlargement_values = [(MBR.findOverlapArea(x, mbr), x) for x in mbr_list] enlargement_values", "entry, node): split_status = None next_mbr = None if True: # if node.getNumChildren()", "attempt an overlap-minimal split # updated on 2016-11-03 to re-structure and modify adjustTree();", "= self.getUpperLeft() lower_right1 = self.getLowerRight() upper_left2 = mbr.getUpperLeft() lower_right2 = mbr.getLowerRight() is_equal =", "None: raise Exception() \"\"\" # print tree.toString() # for entry in 
entries[0 :", "CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: # raise Exception() # print \"decision point\" \"\"\" if", "time.time() time_diff = time2 - time1 print \"time difference:\", time_diff, \"seconds\" # raise", "2 next_x1, next_y1 = (multiplier * x1 + offset, multiplier * y1 +", "curr_mbr2b = RawMBR((50, 50, 0), (100, 100, 0), HyperRectangle((50, 50, 0), (100, 100,", "* 6.5) * 0.8 offset = (1536 * 0.2) / 2 next_x1, next_y1", "None: return elif node.isLeafNode() == True: if depth != curr_depth: return False else:", "node = entry.getChild() mbr = entry.getMBR() if mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(mbr) ==", "entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split): return tree.rstarAdjustTreeHelper(tree, node, resulting_entries_from_split, have_resulting_second_entry_from_split) @staticmethod", "entry_group1: next_curr_node = curr_entry.getChild() if curr_entry != entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry in", "mbr # and associated mbr is not contained within reference mbr continue if", "priority = (first_priority_component, second_priority_component) if curr_mbr.isRaw() == True: priority = -1 * curr_mbr_area", "16 # n = 6,000 works in 56.672 sec. for pypy with m", "# ignore if contained rectangle is contained by a rectangle in conflict x-tree,", "entries = node.getEntries() priority_tagged_internal_entries = [] for curr_entry in entries: # set priority", "= (first_priority_component, second_priority_component) priority_tagged_internal_entry = (priority, curr_entry) priority_tagged_internal_entries.append(priority_tagged_internal_entry) # item = curr_entry #", "in 2.996 sec. 
for pypy with m = 2 and M = 4", "point8) node8 = RTreeNode(None, [], True) entry8 = RTreeEntry(mbr8, node8) node8.setEntry(entry8) # problem", "= entry.getChild() child_str = self.toNumChildrenStringHelper(child) curr_str = child_str overall_str_list.append(curr_str) overall_str = \"(\" +", "with mbr:\", entry.getMBR().toString() # print \"tree, currently:\", tree.toString() # tree2.delete(entry) pass # print", "entries: if curr_entry.getMBR().doesEnclose(mbr) == True: result = self.doEnclosureQueryWithEarlyStoppingHelper(mbr, curr_entry) if result == True:", "\\ ((879, 319, 789), (1877, 744, 791)), \\ ((1081, 1056, 1020), (1708, 1075,", "draw, 0) im.save(\"tree.png\", \"PNG\") \"\"\" # image = PythonMagick.Image(PythonMagick.Geometry(\"768x768\"), \"white\") image = PythonMagick.Image(PythonMagick.Geometry(\"1536x1536\"),", "+ offset \"\"\" # image.draw(PythonMagick.DrawableRectangle(next_x1, next_y1, next_x2, next_y2)) image.write(\"tree.png\") def main(): point1 =", "x in next_candidate_distributions] mbr_pair_tagged_next_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for x in mbr_list_pair_tagged_candidate_distributions] combined_area_tagged_next_candidate_distributions", "def makeMBR(component_mbr_list): upper_left_points = [x.getUpperLeft() for x in component_mbr_list] lower_right_points = [x.getLowerRight() for", "(100, 100, 0), (100, 100, 0)) curr_mbr2 = RawMBR((50, 100, 0), (50, 100,", "with m = 8 and M = 16 # n = 6,000 works", "= [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) @staticmethod def rstarAdjustTree(tree,", "lower_right2 = mbr.getLowerRight() is_equal = upper_left1 == upper_left2 and lower_right1 == lower_right2 return", "= (60, 100, 0) point5 = (70, 100, 0) point6 = (80, 100,", "reference_mbr.doesEnclose(mbr) == False: # ignore node if associated mbr does not enclose reference", "50, 0), (100, 100, 0), HyperRectangle((50, 
50, 0), (100, 100, 0), 1)) tree", "\"\"\" print tree.toString() tree2 = RTree() import random entries = [] # lower_rights", "= lower_right def isRaw(self): return False def isComposite(self): return False def getUpperLeft(self): return", "886, 493), (822, 1305, 1149)), \\ ((800, 709, 871), (1390, 1402, 1548)), \\", "entries, # which we do do occasionally # note that M of two", "RTreeNode(None, [], True) next_entry = RTreeEntry(next_mbr, next_node) next_node.setEntry(next_entry) curr_x_tree.insert(next_entry) union_area = curr_x_tree.getUnionArea() multi_overlap_ratio", "def draw(tree, entries, image, depth): for entry in entries: RTreeEntry.drawHelper(tree, entry, image, depth)", "RTreeNode(parent, entry_collection2, None, entry2) entry2.setChild(node2) for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in", "RTreeEntry(next_mbr, None) next_root = RTreeNode(None, [entry1, entry2], None, next_root_entry) next_root_entry.setChild(next_root) self.setRootEntry(next_root_entry) node1.setParent(next_root) node2.setParent(next_root)", "* 0.8 offset = (1536 * 0.2) / 2 next_x1, next_y1 = (multiplier", "mbr5 = RawMBR(point5, (110, 200, 100), point5) node5 = RTreeNode(None, [], True) entry5", "curr_entries = node.getEntries() entry = parent.retrieveEntryForChild(node) children = [x.getChild() for x in curr_entries]", "n = 10000 # 84.222 seconds (~197x slower for 100x growth; expected 664x", "entries = entry.getChild().getEntries() for curr_entry in entries: if curr_entry.getMBR().doesEnclose(mbr) == True: result =", "in added_nodes]) # print \"supernode #1\" return (RTree.SUPERNODE, [node]) elif split_status == RTree.SUPERNODE:", "\") + \")\" return overall_str def toDepthString(self): root = self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0)", "= [root_entry] RTreeEntry.draw(self, entries, image, 0) \"\"\" image.strokeColor(\"orange\") image.fillColor(\"none\") image.strokeWidth(4) multiplier = 3", "xrange(1000): 
upper_left = (0, 0) lower_right = (10, 10) mbr = RawMBR(upper_left, lower_right,", "len(candidate_entries) != 1: candidate_entries = self.chooseEntriesWithMinimalAreaEnlargement(candidate_entries, entry) if len(candidate_entries) != 1: candidate_entries =", "True: result = self.findLeafHelper(entry, next_entry) if result == True: return result return False", "# min-pq class PriorityQueue: def __init__(self): self.heap = [] def push(self, item, priority):", "partner_entry) l, ll, e, ee = split_result return tree.adjustTree(tree, l, [e, ee], True,", "supernode demotion when size decreases to or below M # updated on 2016-11-06", "else: overall_str_list = [] for entry in entries: child = entry.getChild() child_str =", "\"\"\" node1 = RTreeNode(parent, entry_group1, prev_leaf_status) node2 = RTreeNode(parent, entry_group2, prev_leaf_status) for curr_entry", "== 0) or (self.getNumChildren() != 0 and False not in [x.getChild().getNumEntries() == 0", "True overall_str_list = None if have_node_str == True: curr_leaf_status = \"-\" if (node.getParent()", "that takes O(log(n)) time on average # for start rectangle taken from set", "elif i % 4 == 3: upper_left = (100, 100) lower_right = (120,", "class RTreeNode: def __init__(self, parent, entries, is_leaf, entry = None, split_history_root_dimension = None,", "(node.getParent() != None and node in node.getParent().getChildren())) == False else \"+\" overall_str_list =", "for x in mbr_list] lower_right_points = [x.getLowerRight() for x in mbr_list] points =", "node.isLeafNode() == True: candidate_entries = self.chooseEntriesWithMinimalOverlapEnlargement(entries, entry) if len(candidate_entries) != 1: candidate_entries =", "node): if node == None: return \"\" entries = node.getEntries() children = node.getChildren()", "= curr_node.getParent() \"\"\" if parent != None and (node in parent.getChildren()): pass \"\"\"", "have_node_str == True: curr_depth = \"-\" if node.getNumEntries() != 0 else str(depth) overall_str_list", "node): 
self.child = node @staticmethod def draw(tree, entries, image, depth): for entry in", "next_y + offset + radius image.draw(PythonMagick.DrawableCircle(center_x, center_y, perimeter_x, perimeter_y)) children = [x.getChild() for", "entries def doContainmentQuery(self, mbr): partial_result = [] self.doContainmentQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def", "= [] for entry in entries: child = entry.getChild() child_str = self.toEntriesArePresentStringHelper(child) curr_str", "elif curr_mbr.isRaw() == False: if curr_mbr.doesEnclose(reference_mbr) == False and reference_mbr.doesEnclose(curr_mbr) == False: continue", "node is a leaf node, it has an actual rectangle # decide whether", "child = entry.getChild() child_str = self.toDepthStringHelper(child, depth + 1) curr_str = child_str overall_str_list.append(curr_str)", "start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry in start_rectangle_entries: close_descendant_entries = self.getRectangleCloseDescendants(start_rectangle_entry) for close_descendant_entry", "= self.getRootEntry().getChild() return self.toDepthStringHelper(root, 0) def toDepthStringHelper(self, node, depth): if node == None:", "note that our tree # has entry-aware nodes; made bug fix for adjustTree();", "of root; # also, we implement delete(); note that our tree # has", "= [x.getMBR() for x in curr_entries] tight_overall_mbr = CompositeMBR.makeMBR(mbr_list) entry.setMBR(tight_overall_mbr) return else: #", "split_status == RTree.SPLIT: # added_node.setParent(node) if node.getNumChildren() > node.getMaximumNumEntriesPerNode(): split_result = self.xtreeSplitNode(node, entry)", "have cliques # note that we don't necessarily need PythonMagick # note that", "0 else: upper_left_a = mbr_a.getUpperLeft() lower_right_a = mbr_a.getLowerRight() upper_left_b = mbr_b.getUpperLeft() lower_right_b =", "= 0 x2 = 47 y2 = 60 next_x1 = x1 * multiplier", 
"max_comp_value = max(components) min_components.append(min_comp_value) max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr_list =", "entry def getEntry(self): return self.entry def setEntry(self, entry): self.entry = entry def isSuperNode(self):", "for start_rectangle_entry in start_rectangle_entries: start_rectangle_to_close_ancestor_entries_dict[start_rectangle_entry] = [] for start_rectangle_entry in start_rectangle_entries: close_descendant_entries =", "heapq from collections import deque # min-pq class PriorityQueue: def __init__(self): self.heap =", "== None: raise Exception() \"\"\" # print tree.toString() # for entry in entries[0", "def xtreeSupernodeInsert(self, node, entries): if node.isSuperNode() == False: node.setToSuperNode(True) # questionable if this", "encountered\" parent = node.getParent() parent.removeEntry(parent.retrieveEntryForChild(node)) Q.append(node) # raise Exception() if node.getNumChildren() <= 1:", "for x in d_S_pairs]) min_S_value_d_S_pair_candidates = [x for x in d_S_pairs if x[1]", "this curr_node.setParent(node) \"\"\" entries = node.getEntries() mbr_list = [x.getMBR() for x in entries]", "its entry matches the ignore entry continue if node.isLeafNode() == True: # could", "[x[1] for x in matching_overlap_value_tagged_candidate_distributions] if len(matching_overlap_value_tagged_candidate_distributions) > 1: next_candidate_distributions = next_next_candidates mbr_list_pair_tagged_candidate_distributions", "= True overall_str_list = None if have_node_str == True: curr_depth = \"-\" if", "(1708, 1075, 1542)), \\ ((358, 815, 372), (761, 1089, 594)), \\ ((294, 238,", "ee], True) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), [entry], False) else: return RTree.rstarAdjustTreeHelper(tree, node.getParent(), resulting_entries_from_split,", "enough to cut down branches explored; # to counter saturation, domain has to", "None, entry2) 
entry2.setChild(node2) for curr_entry in entry_collection1: curr_entry.getChild().setParent(node1) for curr_entry in entry_collection2: curr_entry.getChild().setParent(node2)", "children = node.getChildren() have_node_str = True overall_str_list = None if have_node_str == True:", "True return False # returns entries def doContainmentQuery(self, mbr): partial_result = [] self.doContainmentQueryHelper(mbr,", "and node in node.getParent().getChildren())) == False else \"+\" overall_str_list = [curr_leaf_status] else: overall_str_list", "tree.toString() # tree.delete(entry1) print tree.toString() # tree.delete(entry8) # tree.insert(entry1) \"\"\" tree.delete(entry1) tree.delete(entry2) tree.delete(entry3)", "[]) else: parent = node.getParent() curr_entries = node.getEntries() entry = None if node.getParent()", "self.getLowerRight() == mbr.getLowerRight() result = upper_left_matches == True and lower_right_matches == True return", "== 0: if mbr1.isEqualTo(mbr2) == True: overlap_ratio = 1 else: overlap_ratio = 0", "= [] if node.getNumChildren() == 0 else [node.getEntry().getMBR().toString()] # overall_str_list = [] if", "for x in entry_collection2] # this line presumes that we have parent set", "internal nodes can temporarily look like leaf nodes # keep_nodes = [x for", "!= entry: curr_node.removeEntry(curr_entry) next_curr_node.setParent(node1) for curr_entry in entry_group2: next_curr_node = curr_entry.getChild() if curr_entry", "b \"right\" comp_b2 = max(upper_left_b[i], lower_right_b[i]) # print comp_a1, comp_a2, comp_b1, comp_b2 #", "node4.setEntry(entry4) tree.insert(entry4) mbr5 = RawMBR(point5, (110, 200, 100), point5) node5 = RTreeNode(None, [],", "for x in entry_collection2] mbr1 = CompositeMBR.makeMBR(mbr_collection1) mbr2 = CompositeMBR.makeMBR(mbr_collection2) overlap_area = MBR.findOverlapArea(mbr1,", "(90, 100, 0) point8 = (110, 100, 0) curr_mbr1 = RawMBR((100, 100, 0),", "did_find_leaf = self.findLeaf(entry) child_node = entry.getChild() # root node never 
has a raw", "def toString(self): root = self.getRootEntry().getChild() return self.toStringHelper(root) def toStringHelper(self, node): if node ==", "partial_result = [] self.doEnclosureQueryHelper(mbr, self.getRootEntry(), partial_result) return partial_result def doEnclosureQueryHelper(self, mbr, entry, partial_result):", "# if len(tree.getNodes()) != 0: # print \"removing entry with mbr:\", entry.getMBR().toString() #", "node.isLeafNode() == True: if depth != curr_depth: return False else: return True else:", "max_components.append(max_comp_value) upper_left_point = tuple(min_components) lower_right_point = tuple(max_components) result_mbr = CompositeMBR(upper_left_point, lower_right_point, component_mbr_list) return", "(len(entries) - x) >= m] window_size_pairs = [(window_left_sizes[i], len(entries) - window_left_sizes[i]) for i", "node.getMinimumNumEntriesPerNode(): return (False, None, None, dimension) else: return (True, entry_collection3, entry_collection4, dimension) else:", "2)) ** (1 / 3.0) / denominator)) # for n = 14500 #", "a node exists # def delete(self, E, RN): def findLeaf(self, entry): return self.findLeafHelper(entry,", "continue # kick out close descendant candidates on occasion, # if containment query", "comp_a1 <= comp_b2 and comp_a2 >= comp_b1 if do_overlap == False: break return", "self.getNumChildren() == 0) or (self.getNumChildren() != 0 and False not in [x.getChild().getNumEntries() ==", "] low_sorted_entries.sort(key = lambda x: x.getMBR().getUpperLeft()[i]) low_distributions = [(low_sorted_entries[ : window_left_sizes[j]], low_sorted_entries[window_left_sizes[j] :", "has entry-aware nodes; made bug fix for adjustTree(); # fixed bug with parent", "first_priority_component = 0 if root_mbr_is_contained == True else 1 second_priority_component = (-1 if", "in x[1]]), x) for x in candidate_distributions] mbr_pair_tagged_candidate_distributions = [((CompositeMBR.makeMBR(x[0][0]), CompositeMBR.makeMBR(x[0][1])), x[1]) for", "+= 
upper_margin_value_sum S_comp_dict[i] = S_comp_value d_S_pairs = S_comp_dict.items() min_S_value = min([x[1] for x", "comp_1a = upper_left[i] comp_1b = lower_right[i] term1 = comp_1b - comp_1a for j", "else: return curr_node return None \"\"\" # a little stilted since we don't", "reference_mbr.doesEnclose(curr_mbr) == False: continue # item = curr_entry # internal_node_stack_deque.appendleft(item) priority = (first_priority_component,", "[], True) entry3 = RTreeEntry(mbr3, node3) node3.setEntry(entry3) tree.insert(entry3) mbr4 = RawMBR(point4, (110, 200,", "to be found for a delete\") # if parent has zero entries after", "< self.getMinimumNumEntriesPerNode() def retrieveEntryForChild(self, node): return (self.child_to_entry_dict)[node] def toString(self): return str(self.getEntries()) class RTreeEntry:", "result1 mbr_collection1 = [x.getMBR() for x in entry_collection1] mbr_collection2 = [x.getMBR() for x", "= 0 y1 = 0 x2 = 47 y2 = 60 next_x1 =" ]
[ "mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", )", "\"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) #", "import utils def test_stdout(): \"\"\"Verify stdout and stderr. pytest docs on capturing stdout", "\"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) # Verify mailmerge", "message 1 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0", "stderr = output.stderr.decode(\"utf-8\") assert stderr == \"\" assert \"Date:\" in stdout stdout =", "MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself, Your number is 17.", "This was a dry run. To send messages, use the --no-dry-run option. \"\"\"", "MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob, Your number is 42.", "match exactly. stdout = output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert stderr == \"\" assert", "Content-Transfer-Encoding: 7bit Hi, Myself, Your number is 17. >>> sent message 0 >>>", "stdout and stderr. pytest docs on capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd", "Your number is 42. >>> sent message 1 >>> This was a dry", "and stderr. 
pytest docs on capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd =", "\"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) # Verify mailmerge output. We'll filter out", "re import sh from . import utils def test_stdout(): \"\"\"Verify stdout and stderr.", "is 42. >>> sent message 1 >>> This was a dry run. To", "We'll filter out the Date header because it # won't match exactly. stdout", "= re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout == \"\"\">>> message 0 TO: <EMAIL> SUBJECT:", "stderr == \"\" assert \"Date:\" in stdout stdout = re.sub(r\"Date.*\\n\", \"\", stdout) assert", ">>> message 1 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version:", "utils def test_stdout(): \"\"\"Verify stdout and stderr. pytest docs on capturing stdout and", "stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\",", "Verify mailmerge output. We'll filter out the Date header because it # won't", "TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain;", "output. We'll filter out the Date header because it # won't match exactly.", "assert stderr == \"\" assert \"Date:\" in stdout stdout = re.sub(r\"Date.*\\n\", \"\", stdout)", "\"\"\" System tests. <NAME> <<EMAIL>> \"\"\" import os import re import sh from", "= output.stderr.decode(\"utf-8\") assert stderr == \"\" assert \"Date:\" in stdout stdout = re.sub(r\"Date.*\\n\",", "stdout == \"\"\">>> message 0 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self", "import sh from . import utils def test_stdout(): \"\"\"Verify stdout and stderr. pytest", "os import re import sh from . 
import utils def test_stdout(): \"\"\"Verify stdout", ">>> sent message 0 >>> message 1 TO: <EMAIL> SUBJECT: Testing mailmerge FROM:", "charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob, Your number is 42. >>> sent message 1", "\"--no-limit\", \"--dry-run\", ) # Verify mailmerge output. We'll filter out the Date header", "sent message 1 >>> This was a dry run. To send messages, use", "1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself, Your number is 17. >>>", "sent message 0 >>> message 1 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My", "1 >>> This was a dry run. To send messages, use the --no-dry-run", "\"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) # Verify mailmerge output. We'll filter out the Date", "<EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\"", "tests. <NAME> <<EMAIL>> \"\"\" import os import re import sh from . import", "import re import sh from . import utils def test_stdout(): \"\"\"Verify stdout and", "test_stdout(): \"\"\"Verify stdout and stderr. pytest docs on capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html", "number is 17. >>> sent message 0 >>> message 1 TO: <EMAIL> SUBJECT:", "= mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\",", "<NAME> <<EMAIL>> \"\"\" import os import re import sh from . 
import utils", "https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA,", "\"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"),", ">>> This was a dry run. To send messages, use the --no-dry-run option.", "output.stderr.decode(\"utf-8\") assert stderr == \"\" assert \"Date:\" in stdout stdout = re.sub(r\"Date.*\\n\", \"\",", "Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself, Your number", "charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself, Your number is 17. >>> sent message 0", "os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) # Verify mailmerge output. We'll filter out the", "message 1 >>> This was a dry run. To send messages, use the", "\"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) # Verify mailmerge output.", "number is 42. >>> sent message 1 >>> This was a dry run.", "the Date header because it # won't match exactly. stdout = output.stdout.decode(\"utf-8\") stderr", "docs on capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output =", "because it # won't match exactly. stdout = output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert", "Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself, Your number is 17. 
>>> sent", "output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\",", "<<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob, Your number is", "message 0 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0", "os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) # Verify mailmerge output. We'll", "# Verify mailmerge output. We'll filter out the Date header because it #", "Content-Transfer-Encoding: 7bit Hi, Bob, Your number is 42. >>> sent message 1 >>>", "SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding:", "it # won't match exactly. stdout = output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert stderr", "My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself, Your", ". import utils def test_stdout(): \"\"\"Verify stdout and stderr. pytest docs on capturing", "mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi,", "is 17. >>> sent message 0 >>> message 1 TO: <EMAIL> SUBJECT: Testing", "exactly. stdout = output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert stderr == \"\" assert \"Date:\"", ">>> sent message 1 >>> This was a dry run. 
To send messages,", "= sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA,", "<<EMAIL>> \"\"\" import os import re import sh from . import utils def", "import os import re import sh from . import utils def test_stdout(): \"\"\"Verify", "0 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type:", "stderr. pytest docs on capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\")", "stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA,", "header because it # won't match exactly. stdout = output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\")", "<<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself, Your number is", "stdout = re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout == \"\"\">>> message 0 TO: <EMAIL>", "\"\"\" import os import re import sh from . import utils def test_stdout():", "Date header because it # won't match exactly. stdout = output.stdout.decode(\"utf-8\") stderr =", "in stdout stdout = re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout == \"\"\">>> message 0", "on capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output = mailmerge_cmd(", "Myself, Your number is 17. >>> sent message 0 >>> message 1 TO:", "0 >>> message 1 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>>", "Bob, Your number is 42. >>> sent message 1 >>> This was a", "text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob, Your number is 42. 
>>> sent message", "text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself, Your number is 17. >>> sent message", "won't match exactly. stdout = output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert stderr == \"\"", "filter out the Date header because it # won't match exactly. stdout =", "Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob, Your number", "assert \"Date:\" in stdout stdout = re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout == \"\"\">>>", "\"Date:\" in stdout stdout = re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout == \"\"\">>> message", "output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert stderr == \"\" assert \"Date:\" in stdout stdout", "42. >>> sent message 1 >>> This was a dry run. To send", "FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Myself,", "message 0 >>> message 1 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self", "== \"\"\">>> message 0 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>>", "\"--dry-run\", ) # Verify mailmerge output. We'll filter out the Date header because", "\"\" assert \"Date:\" in stdout stdout = re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout ==", "\"\"\"Verify stdout and stderr. pytest docs on capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\"", "assert stdout == \"\"\">>> message 0 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My", "os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) # Verify", "def test_stdout(): \"\"\"Verify stdout and stderr. 
pytest docs on capturing stdout and stderr", "stdout = output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert stderr == \"\" assert \"Date:\" in", "\"\"\">>> message 0 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version:", "stdout) assert stdout == \"\"\">>> message 0 TO: <EMAIL> SUBJECT: Testing mailmerge FROM:", "7bit Hi, Bob, Your number is 42. >>> sent message 1 >>> This", "sh from . import utils def test_stdout(): \"\"\"Verify stdout and stderr. pytest docs", "out the Date header because it # won't match exactly. stdout = output.stdout.decode(\"utf-8\")", "Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob, Your number is 42. >>> sent", "and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"),", "re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout == \"\"\">>> message 0 TO: <EMAIL> SUBJECT: Testing", "Hi, Bob, Your number is 42. >>> sent message 1 >>> This was", "# won't match exactly. stdout = output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert stderr ==", "= output.stdout.decode(\"utf-8\") stderr = output.stderr.decode(\"utf-8\") assert stderr == \"\" assert \"Date:\" in stdout", "17. >>> sent message 0 >>> message 1 TO: <EMAIL> SUBJECT: Testing mailmerge", "Your number is 17. >>> sent message 0 >>> message 1 TO: <EMAIL>", ") # Verify mailmerge output. We'll filter out the Date header because it", "\"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"), \"--no-limit\", \"--dry-run\", ) # Verify mailmerge output. 
We'll filter", "\"\", stdout) assert stdout == \"\"\">>> message 0 TO: <EMAIL> SUBJECT: Testing mailmerge", "1 TO: <EMAIL> SUBJECT: Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type:", "pytest docs on capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output", "Testing mailmerge FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit", "FROM: My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob,", "Hi, Myself, Your number is 17. >>> sent message 0 >>> message 1", "sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\", os.path.join(utils.TESTDATA, \"server_open.conf\"),", "7bit Hi, Myself, Your number is 17. >>> sent message 0 >>> message", "System tests. <NAME> <<EMAIL>> \"\"\" import os import re import sh from .", "mailmerge_cmd = sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\", os.path.join(utils.TESTDATA, \"simple_template.txt\"), \"--database\", os.path.join(utils.TESTDATA, \"simple_database.csv\"), \"--config\",", "from . import utils def test_stdout(): \"\"\"Verify stdout and stderr. pytest docs on", "capturing stdout and stderr https://pytest.readthedocs.io/en/2.7.3/capture.html \"\"\" mailmerge_cmd = sh.Command(\"mailmerge\") output = mailmerge_cmd( \"--template\",", "mailmerge output. We'll filter out the Date header because it # won't match", "stdout stdout = re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout == \"\"\">>> message 0 TO:", "== \"\" assert \"Date:\" in stdout stdout = re.sub(r\"Date.*\\n\", \"\", stdout) assert stdout", "1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob, Your number is 42. 
>>>", "My Self <<EMAIL>> MIME-Version: 1.0 Content-Type: text/plain; charset=\"us-ascii\" Content-Transfer-Encoding: 7bit Hi, Bob, Your" ]
[]
[ "null=True) account = models.ForeignKey(Account, blank=True, null=True) counter_account = models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions'", "= models.IntegerField() reference = models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number = models.IntegerField() class Meta:", "= models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency = models.CharField(max_length=3) balance_before = models.DecimalField(max_digits=12, decimal_places=2)", "= models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category, blank=True, null=True) def clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction)", "= models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title = models.CharField(max_length=100) description = models.TextField(blank=True, null=True) receipt", "title = models.CharField(max_length=100) description = models.TextField(blank=True, null=True) receipt = models.ImageField() outgoing = models.ManyToManyField(Flow,", "account'), ) account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self): return self.iban class Category(models.Model):", "= models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField() reference = models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number", "'journal_date') class Flow(models.Model): transaction = models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category,", "'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'),", "('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL betaling via", "('AF', 
'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'),", "valuta'), ) global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField() reference = models.CharField(max_length=16) description", "generic, but as of yet there are no use-cases. repeat_after_months = models.IntegerField(blank=True, null=True)", "raise ValidationError(\"Sum of flows cannot exceed transaction!\") class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True)", "annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField() to_date = models.DateField(blank=True, null=True) # NULL", "This could be much more generic, but as of yet there are no", "ugettext_lazy as _ from django.core.exceptions import ValidationError class Account(models.Model): iban = models.CharField(_('iban'), max_length=34,", "'Savings account'), # ('investment', 'Investment account'), # TODO how to differentiate ('secondparty', 'Second", "= models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account = models.ForeignKey(Account, blank=True, null=True) counter_account = models.ForeignKey(", "models.TextField(blank=True, null=True) receipt = models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow, blank=True,", "internally identify transaction types. 
# These are translated to the (potentially more generic)", "('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller", "= models.ForeignKey(Account, blank=True, null=True) counter_account = models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions' ) category", "= models.DateField() account = models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency", "types. # These are translated to the (potentially more generic) global code. internal_code", "Category(models.Model): name = models.CharField(max_length=100) # This model is purposefully specific to ASN bank", "value_date = models.DateField() # ASN bank uses this field to internally identify transaction", "= models.ForeignKey(Category, blank=True, null=True) def clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum", "BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'),", "at the moment. class Transaction(models.Model): booking_date = models.DateField() account = models.ForeignKey(Account) counter_account =", "the main use case for development of bogrod at the moment. 
class Transaction(models.Model):", "models.ForeignKey(Account, blank=True, null=True) counter_account = models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions' ) category =", "models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency = models.CharField(max_length=3) balance_before =", "via mobiel'), ('IOB', 'Interne Overboeking'), ('KAS', 'Kas post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB',", "('IOB', 'Interne Overboeking'), ('KAS', 'Kas post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM',", "= models.DateField(blank=True, null=True) to_date = models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)", "# TODO how to differentiate ('secondparty', 'Second party account'), ('other', 'Other account'), )", "'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'),", "'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB',", "= models.DateField() # ASN bank uses this field to internally identify transaction types.", "'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller cheques'),", "counter_account = models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions' ) category = models.ForeignKey(Category, blank=True, null=True)", "> abs(self.transaction.value): raise ValidationError(\"Sum of flows cannot exceed transaction!\") class ExpectedTransaction(models.Model): from_date =", "unique=True) ACCOUNT_TYPES = ( ('checking', 'Checking account'), ('savings', 'Savings account'), # ('investment', 'Investment", 
"null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True)", "class Meta: unique_together = ('sequence_number', 'journal_date') class Flow(models.Model): transaction = models.ForeignKey(Transaction) value =", "'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL betaling via mobiel'), ('IOB', 'Interne", "return self.iban class Category(models.Model): name = models.CharField(max_length=100) # This model is purposefully specific", "more generic, but as of yet there are no use-cases. repeat_after_months = models.IntegerField(blank=True,", "models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField() reference = models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number =", "'Checking account'), ('savings', 'Savings account'), # ('investment', 'Investment account'), # TODO how to", "= models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions' ) category = models.ForeignKey(Category, blank=True, null=True) #", "class Account(models.Model): iban = models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES = ( ('checking', 'Checking account'),", "from django.db import models from django.db.models import Sum from django.utils.translation import ugettext_lazy as", "models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number = models.IntegerField() class Meta: unique_together = ('sequence_number', 'journal_date')", "more generic) global code. 
internal_code = models.IntegerField() BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'), ('AF',", "= models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category, blank=True, null=True) def clean(self):", "related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField() to_date = models.DateField(blank=True,", "choices=BOOKING_CODES) sequence_number = models.IntegerField() reference = models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number = models.IntegerField()", "= models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, decimal_places=2) journal_date = models.DateField()", "'Telefonische Overboeking'), ('VV', 'Vreemde valuta'), ) global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField()", "models.DateField(blank=True, null=True) to_date = models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value", "journal_date = models.DateField() value_date = models.DateField() # ASN bank uses this field to", "specific to ASN bank transactions, as that is # the main use case", "= ('sequence_number', 'journal_date') class Flow(models.Model): transaction = models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2) category", "= ( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL',", "('checking', 'Checking account'), ('savings', 'Savings account'), # ('investment', 'Investment account'), # TODO how", "related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency = models.CharField(max_length=3) 
balance_before = models.DecimalField(max_digits=12, decimal_places=2) mutation_currency =", "differentiate ('secondparty', 'Second party account'), ('other', 'Other account'), ) account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES,", "that is # the main use case for development of bogrod at the", "models.DateField() value_date = models.DateField() # ASN bank uses this field to internally identify", "outgoing = models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage =", "models from django.db.models import Sum from django.utils.translation import ugettext_lazy as _ from django.core.exceptions", "models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES = ( ('checking', 'Checking account'), ('savings', 'Savings account'), #", "= models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account =", "models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account = models.ForeignKey(Account,", "__str__(self): return self.iban class Category(models.Model): name = models.CharField(max_length=100) # This model is purposefully", "def clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum + self.value) > abs(self.transaction.value):", "models.IntegerField() class Meta: unique_together = ('sequence_number', 'journal_date') class Flow(models.Model): transaction = models.ForeignKey(Transaction) value", "account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self): return self.iban class Category(models.Model): name =", "uses this field to internally identify transaction types. 
# These are translated to", "name = models.CharField(max_length=100) # This model is purposefully specific to ASN bank transactions,", "('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'),", "from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError class Account(models.Model): iban", "(Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum + self.value) > abs(self.transaction.value): raise ValidationError(\"Sum of flows", "= models.DateField() value_date = models.DateField() # ASN bank uses this field to internally", ") global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField() reference = models.CharField(max_length=16) description =", "('sequence_number', 'journal_date') class Flow(models.Model): transaction = models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2) category =", "to the (potentially more generic) global code. internal_code = models.IntegerField() BOOKING_CODES = (", "# ASN bank uses this field to internally identify transaction types. # These", "to ASN bank transactions, as that is # the main use case for", "the (potentially more generic) global code. 
internal_code = models.IntegerField() BOOKING_CODES = ( ('ACC',", "once the transaction has occurred flows = models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title =", "def __str__(self): return self.iban class Category(models.Model): name = models.CharField(max_length=100) # This model is", "null=True) to_date = models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value =", "models.IntegerField() BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ',", "'Investment account'), # TODO how to differentiate ('secondparty', 'Second party account'), ('other', 'Other", "= models.CharField(max_length=3) balance_before = models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, decimal_places=2)", "Meta: unique_together = ('sequence_number', 'journal_date') class Flow(models.Model): transaction = models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12,", "party account'), ('other', 'Other account'), ) account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self):", "'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV',", "Overboeking'), ('KAS', 'Kas post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV',", "models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category, blank=True, null=True) def clean(self): flow_sum", "'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ',", "mobiel'), ('IOB', 'Interne Overboeking'), ('KAS', 'Kas 
post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'),", "models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self): return self.iban class Category(models.Model): name = models.CharField(max_length=100) #", "post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'), ('RNT', 'Rente'),", "models.DecimalField(max_digits=12, decimal_places=2) journal_date = models.DateField() value_date = models.DateField() # ASN bank uses this", "= models.ForeignKey(Category, blank=True, null=True) # This could be much more generic, but as", "= models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number = models.IntegerField() class Meta: unique_together = ('sequence_number',", "django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError class Account(models.Model): iban =", "clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum + self.value) > abs(self.transaction.value): raise", "class Flow(models.Model): transaction = models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category, blank=True,", "blank=True, null=True) def clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum + self.value)", "global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField() reference = models.CharField(max_length=16) description = models.CharField(max_length=140)", "null=True) # Initialize flow once the transaction has occurred flows = models.ManyToManyField(Flow, blank=True)", "decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, 
decimal_places=2) journal_date = models.DateField() value_date =", "receipt = models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class", "( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse", "null=True, related_name='counter_expected_transactions' ) category = models.ForeignKey(Category, blank=True, null=True) # This could be much", "import Sum from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError class", "('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB',", "from django.db.models import Sum from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import", "purposefully specific to ASN bank transactions, as that is # the main use", "TODO how to differentiate ('secondparty', 'Second party account'), ('other', 'Other account'), ) account_type", "bogrod at the moment. class Transaction(models.Model): booking_date = models.DateField() account = models.ForeignKey(Account) counter_account", "the moment. 
class Transaction(models.Model): booking_date = models.DateField() account = models.ForeignKey(Account) counter_account = models.ForeignKey(Account,", "'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde valuta'), ) global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number", "flows = models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title = models.CharField(max_length=100) description = models.TextField(blank=True, null=True)", "= models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField() to_date = models.DateField(blank=True, null=True) # NULL signifies", "booking_date = models.DateField() account = models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70)", "('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde valuta'), ) global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number =", "models.CharField(max_length=140) statement_number = models.IntegerField() class Meta: unique_together = ('sequence_number', 'journal_date') class Flow(models.Model): transaction", "blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField() to_date =", "models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency = models.CharField(max_length=3) balance_before = models.DecimalField(max_digits=12, decimal_places=2) mutation_currency", "of yet there are no use-cases. 
repeat_after_months = models.IntegerField(blank=True, null=True) # Initialize flow", "as that is # the main use case for development of bogrod at", "to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account = models.ForeignKey(Account, blank=True, null=True) counter_account =", "= models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5,", "Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro", "= models.TextField(blank=True, null=True) receipt = models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow,", "<gh_stars>0 from django.db import models from django.db.models import Sum from django.utils.translation import ugettext_lazy", "to internally identify transaction types. # These are translated to the (potentially more", "# These are translated to the (potentially more generic) global code. internal_code =", "of flows cannot exceed transaction!\") class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True) to_date =", "Loan(models.Model): title = models.CharField(max_length=100) description = models.TextField(blank=True, null=True) receipt = models.ImageField() outgoing =", "main use case for development of bogrod at the moment. class Transaction(models.Model): booking_date", "models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, decimal_places=2) journal_date = models.DateField() value_date = models.DateField() # ASN", "ASN bank uses this field to internally identify transaction types. 
# These are", "sequence_number = models.IntegerField() reference = models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number = models.IntegerField() class", "('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL betaling via mobiel'), ('IOB', 'Interne Overboeking'), ('KAS', 'Kas", "models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True,", "('savings', 'Savings account'), # ('investment', 'Investment account'), # TODO how to differentiate ('secondparty',", "('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA',", "ValidationError(\"Sum of flows cannot exceed transaction!\") class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True) to_date", "models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account = models.ForeignKey(Account, blank=True, null=True) counter_account = models.ForeignKey( Account,", "'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP',", "= models.IntegerField() class Meta: unique_together = ('sequence_number', 'journal_date') class Flow(models.Model): transaction = models.ForeignKey(Transaction)", "('PRV', 'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde valuta'), )", "('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'),", "'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC',", "ValidationError class Account(models.Model): iban = models.CharField(_('iban'), max_length=34, unique=True) 
ACCOUNT_TYPES = ( ('checking', 'Checking", "class Category(models.Model): name = models.CharField(max_length=100) # This model is purposefully specific to ASN", "('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'),", ") category = models.ForeignKey(Category, blank=True, null=True) # This could be much more generic,", "much more generic, but as of yet there are no use-cases. repeat_after_months =", "class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField() to_date = models.DateField(blank=True, null=True)", "model is purposefully specific to ASN bank transactions, as that is # the", "('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC',", "decimal_places=2, blank=True, null=True) account = models.ForeignKey(Account, blank=True, null=True) counter_account = models.ForeignKey( Account, blank=True,", "('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL betaling via mobiel'), ('IOB',", "account'), ('other', 'Other account'), ) account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self): return", "'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV',", "development of bogrod at the moment. 
class Transaction(models.Model): booking_date = models.DateField() account =", "'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'),", "balance_before = models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, decimal_places=2) journal_date =", "flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum + self.value) > abs(self.transaction.value): raise ValidationError(\"Sum", "Sum from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError class Account(models.Model):", "null=True) def clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum + self.value) >", "ASN bank transactions, as that is # the main use case for development", "if abs(flow_sum + self.value) > abs(self.transaction.value): raise ValidationError(\"Sum of flows cannot exceed transaction!\")", "as _ from django.core.exceptions import ValidationError class Account(models.Model): iban = models.CharField(_('iban'), max_length=34, unique=True)", "to_date = models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value = models.DecimalField(max_digits=12,", "blank=True, null=True) # This could be much more generic, but as of yet", "mutation_currency = models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, decimal_places=2) journal_date = models.DateField() value_date = models.DateField()", "class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True) to_date = models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12,", "models.CharField(max_length=70) 
account_currency = models.CharField(max_length=3) balance_before = models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value =", "Account, blank=True, null=True, related_name='counter_expected_transactions' ) category = models.ForeignKey(Category, blank=True, null=True) # This could", "Initialize flow once the transaction has occurred flows = models.ManyToManyField(Flow, blank=True) class Loan(models.Model):", "('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'),", "'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF',", "self.iban class Category(models.Model): name = models.CharField(max_length=100) # This model is purposefully specific to", "('secondparty', 'Second party account'), ('other', 'Other account'), ) account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other')", "'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL betaling via mobiel'), ('IOB', 'Interne Overboeking'), ('KAS',", "models.ForeignKey(Category, blank=True, null=True) def clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum +", "('INC', 'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL betaling via mobiel'), ('IOB', 'Interne Overboeking'),", "for development of bogrod at the moment. class Transaction(models.Model): booking_date = models.DateField() account", "this field to internally identify transaction types. # These are translated to the", "transaction types. # These are translated to the (potentially more generic) global code.", "code. 
internal_code = models.IntegerField() BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'),", "= models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField()", "ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True) to_date = models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2,", "blank=True, null=True) counter_account = models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions' ) category = models.ForeignKey(Category,", "'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'),", "= (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum + self.value) > abs(self.transaction.value): raise ValidationError(\"Sum of", "'Premies'), ('PRV', 'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde valuta'),", "blank=True) payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date", "django.core.exceptions import ValidationError class Account(models.Model): iban = models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES = (", "description = models.TextField(blank=True, null=True) receipt = models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True) payment =", "traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL", "('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'), 
('RNT', 'Rente'), ('STO',", "= models.IntegerField() BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'),", "to differentiate ('secondparty', 'Second party account'), ('other', 'Other account'), ) account_type = models.CharField(max_length=10,", "'Second party account'), ('other', 'Other account'), ) account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def", "abs(self.transaction.value): raise ValidationError(\"Sum of flows cannot exceed transaction!\") class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True,", "= models.IntegerField(blank=True, null=True) # Initialize flow once the transaction has occurred flows =", "'Kas post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'), ('RNT',", "= models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency = models.CharField(max_length=3) balance_before", "'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL betaling via mobiel'),", "identify transaction types. 
# These are translated to the (potentially more generic) global", "= models.DateField() to_date = models.DateField(blank=True, null=True) # NULL signifies today loan = models.ForeignKey(Loan)", "('other', 'Other account'), ) account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self): return self.iban", "( ('checking', 'Checking account'), ('savings', 'Savings account'), # ('investment', 'Investment account'), # TODO", "models.IntegerField() reference = models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number = models.IntegerField() class Meta: unique_together", "'Interne Overboeking'), ('KAS', 'Kas post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'),", "decimal_places=2) from_date = models.DateField() to_date = models.DateField(blank=True, null=True) # NULL signifies today loan", "reference = models.CharField(max_length=16) description = models.CharField(max_length=140) statement_number = models.IntegerField() class Meta: unique_together =", "'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische", "django.db import models from django.db.models import Sum from django.utils.translation import ugettext_lazy as _", "how to differentiate ('secondparty', 'Second party account'), ('other', 'Other account'), ) account_type =", "decimal_places=2) journal_date = models.DateField() value_date = models.DateField() # ASN bank uses this field", "moment. 
class Transaction(models.Model): booking_date = models.DateField() account = models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions')", "= models.CharField(max_length=100) description = models.TextField(blank=True, null=True) receipt = models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True)", "transaction = models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category, blank=True, null=True) def", "blank=True) class Loan(models.Model): title = models.CharField(max_length=100) description = models.TextField(blank=True, null=True) receipt = models.ImageField()", "= models.DecimalField(max_digits=12, decimal_places=2) journal_date = models.DateField() value_date = models.DateField() # ASN bank uses", "Flow(models.Model): transaction = models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category, blank=True, null=True)", "# This model is purposefully specific to ASN bank transactions, as that is", "yet there are no use-cases. repeat_after_months = models.IntegerField(blank=True, null=True) # Initialize flow once", "import models from django.db.models import Sum from django.utils.translation import ugettext_lazy as _ from", "# the main use case for development of bogrod at the moment. class", "('investment', 'Investment account'), # TODO how to differentiate ('secondparty', 'Second party account'), ('other',", "internal_code = models.IntegerField() BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'), ('BEA',", "unique_together = ('sequence_number', 'journal_date') class Flow(models.Model): transaction = models.ForeignKey(Transaction) value = models.DecimalField(max_digits=12, decimal_places=2)", "be much more generic, but as of yet there are no use-cases. 
repeat_after_months", "account'), # TODO how to differentiate ('secondparty', 'Second party account'), ('other', 'Other account'),", "These are translated to the (potentially more generic) global code. internal_code = models.IntegerField()", "betaling'), ('IMB', 'iDEAL betaling via mobiel'), ('IOB', 'Interne Overboeking'), ('KAS', 'Kas post'), ('KTN',", "value = models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category, blank=True, null=True) def clean(self): flow_sum =", "Overboeking'), ('VV', 'Vreemde valuta'), ) global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField() reference", "are translated to the (potentially more generic) global code. internal_code = models.IntegerField() BOOKING_CODES", "default='other') def __str__(self): return self.iban class Category(models.Model): name = models.CharField(max_length=100) # This model", "category = models.ForeignKey(Category, blank=True, null=True) # This could be much more generic, but", "= models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2,", "= models.CharField(max_length=100) # This model is purposefully specific to ASN bank transactions, as", "could be much more generic, but as of yet there are no use-cases.", "are no use-cases. repeat_after_months = models.IntegerField(blank=True, null=True) # Initialize flow once the transaction", "models.CharField(max_length=100) description = models.TextField(blank=True, null=True) receipt = models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True) payment", "case for development of bogrod at the moment. class Transaction(models.Model): booking_date = models.DateField()", "use-cases. 
repeat_after_months = models.IntegerField(blank=True, null=True) # Initialize flow once the transaction has occurred", "('KAS', 'Kas post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'),", "self.value) > abs(self.transaction.value): raise ValidationError(\"Sum of flows cannot exceed transaction!\") class ExpectedTransaction(models.Model): from_date", "'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde valuta'), ) global_code", "('ETC', 'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL betaling'),", "models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage", "there are no use-cases. repeat_after_months = models.IntegerField(blank=True, null=True) # Initialize flow once the", "models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, decimal_places=2) journal_date = models.DateField() value_date", "import ValidationError class Account(models.Model): iban = models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES = ( ('checking',", "'iDEAL betaling'), ('IMB', 'iDEAL betaling via mobiel'), ('IOB', 'Interne Overboeking'), ('KAS', 'Kas post'),", "models.CharField(max_length=100) # This model is purposefully specific to ASN bank transactions, as that", "django.db.models import Sum from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError", "'Other account'), ) account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self): return self.iban class", "account_currency = models.CharField(max_length=3) balance_before = 
models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12,", "ACCOUNT_TYPES = ( ('checking', 'Checking account'), ('savings', 'Savings account'), # ('investment', 'Investment account'),", "bank uses this field to internally identify transaction types. # These are translated", "generic) global code. internal_code = models.IntegerField() BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'),", "cannot exceed transaction!\") class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True) to_date = models.DateField(blank=True, null=True)", "= models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model):", "payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date =", "choices=ACCOUNT_TYPES, default='other') def __str__(self): return self.iban class Category(models.Model): name = models.CharField(max_length=100) # This", "no use-cases. 
repeat_after_months = models.IntegerField(blank=True, null=True) # Initialize flow once the transaction has", "= models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self): return self.iban class Category(models.Model): name = models.CharField(max_length=100)", "This model is purposefully specific to ASN bank transactions, as that is #", "category = models.ForeignKey(Category, blank=True, null=True) def clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value'))) if", "from_date = models.DateField(blank=True, null=True) to_date = models.DateField(blank=True, null=True) from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True,", "betaling via mobiel'), ('IOB', 'Interne Overboeking'), ('KAS', 'Kas post'), ('KTN', 'Kosten/provisies'), ('KST', 'Kosten/provisies'),", "occurred flows = models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title = models.CharField(max_length=100) description = models.TextField(blank=True,", "'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'), ('CHP', 'Chipknip'), ('CHQ', 'Cheque'), ('COR',", "('KST', 'Kosten/provisies'), ('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'), ('TEL',", "class Loan(models.Model): title = models.CharField(max_length=100) description = models.TextField(blank=True, null=True) receipt = models.ImageField() outgoing", "models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField() to_date = models.DateField(blank=True, null=True) # NULL signifies today", "exceed transaction!\") class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True) to_date = models.DateField(blank=True, null=True) from_value", "# Initialize flow once the transaction has occurred flows = models.ManyToManyField(Flow, blank=True) class", "# This could be much more generic, 
but as of yet there are", "decimal_places=2, blank=True, null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account = models.ForeignKey(Account, blank=True,", "blank=True, null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account = models.ForeignKey(Account, blank=True, null=True)", "null=True) receipt = models.ImageField() outgoing = models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans')", "('IMB', 'iDEAL betaling via mobiel'), ('IOB', 'Interne Overboeking'), ('KAS', 'Kas post'), ('KTN', 'Kosten/provisies'),", "transaction has occurred flows = models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title = models.CharField(max_length=100) description", "models.ManyToManyField(Flow, blank=True) payment = models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2)", "models.DateField() account = models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency =", "'iDEAL betaling via mobiel'), ('IOB', 'Interne Overboeking'), ('KAS', 'Kas post'), ('KTN', 'Kosten/provisies'), ('KST',", "models.DecimalField(max_digits=12, decimal_places=2) category = models.ForeignKey(Category, blank=True, null=True) def clean(self): flow_sum = (Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk)", "but as of yet there are no use-cases. 
repeat_after_months = models.IntegerField(blank=True, null=True) #", "account'), ('savings', 'Savings account'), # ('investment', 'Investment account'), # TODO how to differentiate", ") account_type = models.CharField(max_length=10, choices=ACCOUNT_TYPES, default='other') def __str__(self): return self.iban class Category(models.Model): name", "models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions' ) category = models.ForeignKey(Category, blank=True, null=True) # This", "transaction!\") class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True) to_date = models.DateField(blank=True, null=True) from_value =", "max_length=34, unique=True) ACCOUNT_TYPES = ( ('checking', 'Checking account'), ('savings', 'Savings account'), # ('investment',", "_ from django.core.exceptions import ValidationError class Account(models.Model): iban = models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES", "= models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, decimal_places=2) journal_date = models.DateField() value_date = models.DateField() #", "(potentially more generic) global code. 
internal_code = models.IntegerField() BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'),", "bank transactions, as that is # the main use case for development of", "is purposefully specific to ASN bank transactions, as that is # the main", "account'), # ('investment', 'Investment account'), # TODO how to differentiate ('secondparty', 'Second party", "'Effectenboeking'), ('ETC', 'Euro traveller cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL", "related_name='counter_expected_transactions' ) category = models.ForeignKey(Category, blank=True, null=True) # This could be much more", "account = models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency = models.CharField(max_length=3)", "translated to the (potentially more generic) global code. internal_code = models.IntegerField() BOOKING_CODES =", "from django.core.exceptions import ValidationError class Account(models.Model): iban = models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES =", "+ self.value) > abs(self.transaction.value): raise ValidationError(\"Sum of flows cannot exceed transaction!\") class ExpectedTransaction(models.Model):", "('RNT', 'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde valuta'), ) global_code =", ".aggregate(Sum('value'))) if abs(flow_sum + self.value) > abs(self.transaction.value): raise ValidationError(\"Sum of flows cannot exceed", "= ( ('checking', 'Checking account'), ('savings', 'Savings account'), # ('investment', 'Investment account'), #", "transactions, as that is # the main use case for development of bogrod", "null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account = models.ForeignKey(Account, blank=True, null=True) counter_account", "statement_number = models.IntegerField() class Meta: unique_together = 
('sequence_number', 'journal_date') class Flow(models.Model): transaction =", "mutation_value = models.DecimalField(max_digits=12, decimal_places=2) journal_date = models.DateField() value_date = models.DateField() # ASN bank", "repeat_after_months = models.IntegerField(blank=True, null=True) # Initialize flow once the transaction has occurred flows", "# ('investment', 'Investment account'), # TODO how to differentiate ('secondparty', 'Second party account'),", "import ugettext_lazy as _ from django.core.exceptions import ValidationError class Account(models.Model): iban = models.CharField(_('iban'),", "('OVB', 'Overboeking'), ('PRM', 'Premies'), ('PRV', 'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'),", "abs(flow_sum + self.value) > abs(self.transaction.value): raise ValidationError(\"Sum of flows cannot exceed transaction!\") class", "InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField() to_date = models.DateField(blank=True, null=True) #", "is # the main use case for development of bogrod at the moment.", "models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title = models.CharField(max_length=100) description = models.TextField(blank=True, null=True) receipt =", "models.CharField(max_length=3) balance_before = models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value = models.DecimalField(max_digits=12, decimal_places=2) journal_date", ".exclude(pk=self.pk) .aggregate(Sum('value'))) if abs(flow_sum + self.value) > abs(self.transaction.value): raise ValidationError(\"Sum of flows cannot", "flows cannot exceed transaction!\") class ExpectedTransaction(models.Model): from_date = models.DateField(blank=True, null=True) to_date = models.DateField(blank=True,", "decimal_places=2) category = models.ForeignKey(Category, blank=True, null=True) def clean(self): flow_sum = 
(Flow.objects.filter(transaction=self.transaction) .exclude(pk=self.pk) .aggregate(Sum('value')))", "('VV', 'Vreemde valuta'), ) global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField() reference =", "global code. internal_code = models.IntegerField() BOOKING_CODES = ( ('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB',", "('PRM', 'Premies'), ('PRV', 'Provisies'), ('RNT', 'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde", "('CHQ', 'Cheque'), ('COR', 'Correctie'), ('DIV', 'Diversen'), ('EFF', 'Effectenboeking'), ('ETC', 'Euro traveller cheques'), ('GBK',", "iban = models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES = ( ('checking', 'Checking account'), ('savings', 'Savings", "= models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES = ( ('checking', 'Checking account'), ('savings', 'Savings account'),", "cheques'), ('GBK', 'GiroBetaalkaart'), ('GEA', 'Geldautomaat'), ('INC', 'Incasso'), ('IDB', 'iDEAL betaling'), ('IMB', 'iDEAL betaling", "counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name = models.CharField(max_length=70) account_currency = models.CharField(max_length=3) balance_before = models.DecimalField(max_digits=12,", "('ACC', 'Acceptgirobetaling'), ('AF', 'Afboeking'), ('AFB', 'Afbetalen'), ('BEA', 'Betaalautomaat'), ('BIJ', 'Bijboeking'), ('BTL', 'Buitenlandse Overboeking'),", "of bogrod at the moment. 
class Transaction(models.Model): booking_date = models.DateField() account = models.ForeignKey(Account)", "= models.CharField(max_length=70) account_currency = models.CharField(max_length=3) balance_before = models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3) mutation_value", "Account(models.Model): iban = models.CharField(_('iban'), max_length=34, unique=True) ACCOUNT_TYPES = ( ('checking', 'Checking account'), ('savings',", "= models.CharField(max_length=140) statement_number = models.IntegerField() class Meta: unique_together = ('sequence_number', 'journal_date') class Flow(models.Model):", "counter_name = models.CharField(max_length=70) account_currency = models.CharField(max_length=3) balance_before = models.DecimalField(max_digits=12, decimal_places=2) mutation_currency = models.CharField(max_length=3)", "'Rente'), ('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde valuta'), ) global_code = models.CharField(max_length=3,", "account = models.ForeignKey(Account, blank=True, null=True) counter_account = models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions' )", "models.ManyToManyField(Flow, blank=True, related_name='repaid_loans') class InterestPeriod(models.Model): annual_percentage = models.DecimalField(max_digits=5, decimal_places=2) from_date = models.DateField() to_date", "flow once the transaction has occurred flows = models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title", "description = models.CharField(max_length=140) statement_number = models.IntegerField() class Meta: unique_together = ('sequence_number', 'journal_date') class", "blank=True, null=True, related_name='counter_expected_transactions' ) category = models.ForeignKey(Category, blank=True, null=True) # This could be", "models.DateField() # ASN bank uses this field to internally identify transaction types. 
#", "('STO', 'Storno'), ('TEL', 'Telefonische Overboeking'), ('VV', 'Vreemde valuta'), ) global_code = models.CharField(max_length=3, choices=BOOKING_CODES)", "models.ForeignKey(Category, blank=True, null=True) # This could be much more generic, but as of", "from_date = models.DateField() to_date = models.DateField(blank=True, null=True) # NULL signifies today loan =", "from_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) to_value = models.DecimalField(max_digits=12, decimal_places=2, blank=True, null=True) account", "the transaction has occurred flows = models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title = models.CharField(max_length=100)", "use case for development of bogrod at the moment. class Transaction(models.Model): booking_date =", "as of yet there are no use-cases. repeat_after_months = models.IntegerField(blank=True, null=True) # Initialize", "null=True) counter_account = models.ForeignKey( Account, blank=True, null=True, related_name='counter_expected_transactions' ) category = models.ForeignKey(Category, blank=True,", "blank=True, null=True) account = models.ForeignKey(Account, blank=True, null=True) counter_account = models.ForeignKey( Account, blank=True, null=True,", "null=True) # This could be much more generic, but as of yet there", "has occurred flows = models.ManyToManyField(Flow, blank=True) class Loan(models.Model): title = models.CharField(max_length=100) description =", "models.IntegerField(blank=True, null=True) # Initialize flow once the transaction has occurred flows = models.ManyToManyField(Flow,", "Transaction(models.Model): booking_date = models.DateField() account = models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name =", "'Vreemde valuta'), ) global_code = models.CharField(max_length=3, choices=BOOKING_CODES) sequence_number = models.IntegerField() reference = models.CharField(max_length=16)", "field to 
internally identify transaction types. # These are translated to the (potentially", "class Transaction(models.Model): booking_date = models.DateField() account = models.ForeignKey(Account) counter_account = models.ForeignKey(Account, related_name='counter_transactions') counter_name" ]
[ "self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas = MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas)", "plt.subplots() # t = np.arange(0.0, 3.0, 0.01) # s = np.sin(2 * np.pi", "the terms of the GNU General Public License as published by * *", "layer.name()) label = repr(g) if self.checkLabels.isChecked() else None # contours if self.opt(idx, QtWidgets.QCheckBox,", "= self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma)", "Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03 git sha : $Format:%H$ copyright : (C)", "levels=nlevels, sigma=sigma) else: if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g),", "if qgis_qhull_fails: if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1] + 0.1", "self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize, label=label) else: self.net.plane(g, label=label) else: if self.opt(idx,", "else: self.net.plane(g, label=label) else: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): self.net.line(g, marker=marker, markersize=markersize, label=label) self.canvas.draw()", "[] self.canvas = MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net", "self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, 
FORM_CLASS): def __init__(self, readsdb, parent=None):", "for f in features], layer.name()) label = repr(g) if self.checkLabels.isChecked() else None #", "layer.getFeatures() # Create data Group if layer._is_planar: g = Group([Fol(f.attribute('azi'), f.attribute('inc')) for f", "* * (at your option) any later version. * * * ***************************************************************************/ \"\"\"", "ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1] + 0.1 * ab[3], 0.03, 0.8", "parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up the user interface from Designer.", "else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails: if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes =", "uic from PyQt5 import QtWidgets from PyQt5.QtCore import Qt from qgis.core import *", "import StereoGrid as StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately,", "= d.values.max() levels = np.linspace(mn, mx, nlevels) levels[-1] += 1e-8 legend = True", ": 2018-11-03 git sha : $Format:%H$ copyright : (C) 2018 by <NAME> email", "PyQt5 import uic from PyQt5 import QtWidgets from PyQt5.QtCore import Qt from qgis.core", "features = layer.getFeatures() # Create data Group if layer._is_planar: g = Group([Fol(f.attribute('azi'), f.attribute('inc'))", "Software Foundation; either version 2 of the License, or * * (at your", "in self.data_layers[::-1]: # plot in right order if layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else:", "program is free software; you can redistribute it and/or modify * * it", "from .stereogrid_workaround import StereoGrid as StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class", "= 
self.net.fig.add_axes([0.1, ab[1] + 0.1 * ab[3], 0.03, 0.8 * ab[3]]) cb =", "label = repr(g) if self.checkLabels.isChecked() else None # contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked():", "= self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if self.opt(idx,", "d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang,", "/*************************************************************************** * * * This program is free software; you can redistribute it", "if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize, label=label) else: self.net.plane(g, label=label) else: if", "(C) 2018 by <NAME> email : <EMAIL> ***************************************************************************/ /*************************************************************************** * * * This", ".stereogrid_workaround import StereoGrid as StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas):", "# plot data markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText()", "2 of the License, or * * (at your option) any later version.", "NavigationToolbar from apsg import * # qhull workaroud import platform qgis_qhull_fails = platform.platform().startswith('Linux')", "'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) # plot data markersize", "if layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else: features = layer.getFeatures() # Create data Group", "kwargs = {'cmap': 'Greys', 
'zorder': 1} d = StereoGridQGIS(g, sigma=sigma) mn = d.values.min()", "* * * This program is free software; you can redistribute it and/or", "qgis_qhull_fails: kwargs = {'cmap': 'Greys', 'zorder': 1} d = StereoGridQGIS(g, sigma=sigma) mn =", "FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self, readsdb,", "from qgis.core import * import matplotlib # Make sure that we are using", "= Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) else: g = Group([Lin(f.attribute('azi'), f.attribute('inc'))", "colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels,", "import NavigationToolbar2QT as NavigationToolbar from apsg import * # qhull workaroud import platform", "else None # contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value()", "* ab[3], 0.03, 0.8 * ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label)", "version 2 of the License, or * * (at your option) any later", "NavigationToolbar2QT as NavigationToolbar from apsg import * # qhull workaroud import platform qgis_qhull_fails", "by * * the Free Software Foundation; either version 2 of the License,", "= MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def opt(self,", "eigf = self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl)", "QtWidgets.QCheckBox, 
'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize, label=label) else: self.net.plane(g, label=label) else: if self.opt(idx, QtWidgets.QCheckBox,", "cb.ax.set_title(label) # principal eigf = self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked()", "the License, or * * (at your option) any later version. * *", "opt(self, index, type, name): return self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid = self.checkGrid.isChecked() self.net.cla()", "Create data Group if layer._is_planar: g = Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in features],", "of the License, or * * (at your option) any later version. *", "self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else:", "self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def opt(self, index, type, name): return self.tabWidget.widget(index).findChild(type, name) def", "data into QGIS Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03 git", "import platform qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround import StereoGrid as StereoGridQGIS", "levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values,", "self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1] + 0.1 * ab[3], 0.03, 0.8 * ab[3]])", "= platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround import StereoGrid as StereoGridQGIS FORM_CLASS, _ =", "if label: cb.ax.set_title(label) # principal eigf = self.opt(idx, QtWidgets.QCheckBox, 
'checkEigPlanes').isChecked() eigl = self.opt(idx,", "Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) else: g = Group([Lin(f.attribute('azi'), f.attribute('inc')) for", "levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails: if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds", "This program is free software; you can redistribute it and/or modify * *", "matplotlib.use('Qt5Agg') import numpy as np import matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg", "doing # self.<objectname>, and you can use autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html", "QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels,", "<NAME> email : <EMAIL> ***************************************************************************/ /*************************************************************************** * * * This program is free", "super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up the user interface from Designer. 
# After", "s = np.sin(2 * np.pi * t) # self.axes.plot(t, s) self.net = StereoNet()", "if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize,", "'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize, label=label) else: self.net.plane(g, label=label) else:", "= self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label) # principal eigf = self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked()", "self.net.cla() for idx, layer in self.data_layers[::-1]: # plot in right order if layer.selectedFeatureCount():", "uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a QWidget (as well as", "email : <EMAIL> ***************************************************************************/ /*************************************************************************** * * * This program is free software;", "self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails: if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1,", "ab[3], 0.03, 0.8 * ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label) #", "d.values.max() levels = np.linspace(mn, mx, nlevels) levels[-1] += 1e-8 legend = True if", "FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None): # fig, self.axes = plt.subplots() # t =", "= self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox,", "f in features], layer.name()) else: g = Group([Lin(f.attribute('azi'), f.attribute('inc')) for f in features],", "t = np.arange(0.0, 3.0, 0.01) 
# s = np.sin(2 * np.pi * t)", "* ***************************************************************************/ \"\"\" import os from PyQt5 import uic from PyQt5 import QtWidgets", "qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g),", "# self.<objectname>, and you can use autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html #", "cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails: if", "= np.sin(2 * np.pi * t) # self.axes.plot(t, s) self.net = StereoNet() FigureCanvas.__init__(self,", "0.03, 0.8 * ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label) # principal", "(at your option) any later version. * * * ***************************************************************************/ \"\"\" import os", "into QGIS Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03 git sha", "d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if", "***************************************************************************/ \"\"\" import os from PyQt5 import uic from PyQt5 import QtWidgets from", "software; you can redistribute it and/or modify * * it under the terms", "from PyQt5 import uic from PyQt5 import QtWidgets from PyQt5.QtCore import Qt from", "modify * * it under the terms of the GNU General Public License", "= layer.getFeatures() # Create data Group if layer._is_planar: g = Group([Fol(f.attribute('azi'), f.attribute('inc')) for", "in features], layer.name()) else: g = Group([Lin(f.attribute('azi'), 
f.attribute('inc')) for f in features], layer.name())", "{'cmap': 'Greys', 'zorder': 1} d = StereoGridQGIS(g, sigma=sigma) mn = d.values.min() mx =", "Free Software Foundation; either version 2 of the License, or * * (at", "$Format:%H$ copyright : (C) 2018 by <NAME> email : <EMAIL> ***************************************************************************/ /*************************************************************************** *", "= StereoGridQGIS(g, sigma=sigma) mn = d.values.min() mx = d.values.max() levels = np.linspace(mn, mx,", "can redistribute it and/or modify * * it under the terms of the", "def opt(self, index, type, name): return self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid = self.checkGrid.isChecked()", "levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if qgis_qhull_fails:", "***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute", "Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03 git sha : $Format:%H$", "layer.name()) else: g = Group([Lin(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) label =", "* the Free Software Foundation; either version 2 of the License, or *", "or * * (at your option) any later version. 
* * * ***************************************************************************/", "qgis.core import * import matplotlib # Make sure that we are using QT5", "* * ***************************************************************************/ \"\"\" import os from PyQt5 import uic from PyQt5 import", "'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\")", "FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) #", "contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx,", "is free software; you can redistribute it and/or modify * * it under", "import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from apsg import", "levels[-1] += 1e-8 legend = True if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs", "QWidget (as well as a FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None): # fig, self.axes", "# principal eigf = self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor,", "f in features], layer.name()) label = repr(g) if self.checkLabels.isChecked() else None # contours", "self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs = {'cmap': 'Greys', 'zorder': 1} d =", "for idx, layer in self.data_layers[::-1]: # plot in right order if layer.selectedFeatureCount(): features", "name) def plotnet(self): self.net.grid = self.checkGrid.isChecked() 
self.net.cla() for idx, layer in self.data_layers[::-1]: #", "QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) # plot data", "QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint)", "# http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas = MyMplCanvas(self) self.toolbar", "------------------- begin : 2018-11-03 git sha : $Format:%H$ copyright : (C) 2018 by", "'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails:", "self.axes.plot(t, s) self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class", "levels = np.linspace(mn, mx, nlevels) levels[-1] += 1e-8 legend = True if self.opt(idx,", "= repr(g) if self.checkLabels.isChecked() else None # contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels", "for f in features], layer.name()) else: g = Group([Lin(f.attribute('azi'), f.attribute('inc')) for f in", "class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set", "eigenlins=eigl) # plot data markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox,", "plot data 
markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if", "/*************************************************************************** ReadSDBDialog A QGIS plugin Read PySDB structural data into QGIS Generated by", "self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails: if legend: ab", "= [] self.canvas = MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net =", "= True if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels,", "MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def opt(self, index,", "sigma=sigma) mn = d.values.min() mx = d.values.max() levels = np.linspace(mn, mx, nlevels) levels[-1]", "'spinSigma').value() if qgis_qhull_fails: kwargs = {'cmap': 'Greys', 'zorder': 1} d = StereoGridQGIS(g, sigma=sigma)", "QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g,", "#widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas = MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self)", "# qhull workaroud import platform qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround import", "License as published by * * the Free Software Foundation; either version 2", "index, type, name): return 
self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid = self.checkGrid.isChecked() self.net.cla() for", "if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if", "ReadSDBDialog A QGIS plugin Read PySDB structural data into QGIS Generated by Plugin", "QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs = {'cmap': 'Greys',", "# Create data Group if layer._is_planar: g = Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in", "\"\"\" /*************************************************************************** ReadSDBDialog A QGIS plugin Read PySDB structural data into QGIS Generated", "True if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs)", "if layer._is_planar: g = Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) else: g", "as StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is", "copyright : (C) 2018 by <NAME> email : <EMAIL> ***************************************************************************/ /*************************************************************************** * *", "we are using QT5 matplotlib.use('Qt5Agg') import numpy as np import matplotlib.pyplot as plt", "name): return self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid = self.checkGrid.isChecked() self.net.cla() for idx, layer", "= StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) 
FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def", "http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03 git sha : $Format:%H$ copyright : (C) 2018", "mx, nlevels) levels[-1] += 1e-8 legend = True if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if", "self.net.fig.add_axes([0.1, ab[1] + 0.1 * ab[3], 0.03, 0.8 * ab[3]]) cb = self.net.fig.colorbar(cs,", ": $Format:%H$ copyright : (C) 2018 by <NAME> email : <EMAIL> ***************************************************************************/ /***************************************************************************", "self.canvas = MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def", "autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers = []", "PyQt5 import QtWidgets from PyQt5.QtCore import Qt from qgis.core import * import matplotlib", "= {'cmap': 'Greys', 'zorder': 1} d = StereoGridQGIS(g, sigma=sigma) mn = d.values.min() mx", "as NavigationToolbar from apsg import * # qhull workaroud import platform qgis_qhull_fails =", "= np.linspace(mn, mx, nlevels) levels[-1] += 1e-8 legend = True if self.opt(idx, QtWidgets.QCheckBox,", "self.<objectname>, and you can use autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect", "Read PySDB structural data into QGIS Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin", "A QGIS plugin Read PySDB structural data into QGIS Generated by Plugin Builder:", "import uic from PyQt5 import QtWidgets from PyQt5.QtCore import Qt from qgis.core import", "= 
Group([Lin(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) label = repr(g) if self.checkLabels.isChecked()", "g = Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) else: g = Group([Lin(f.attribute('azi'),", "if qgis_qhull_fails: from .stereogrid_workaround import StereoGrid as StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__),", "d.values.min() mx = d.values.max() levels = np.linspace(mn, mx, nlevels) levels[-1] += 1e-8 legend", "can use autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers", "self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def opt(self, index, type,", "principal eigf = self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf,", "plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar", "QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked():", "'comboStyle').currentText() if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker,", "self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label) # principal eigf = self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl", "# #widgets-and-dialogs-with-auto-connect self.setupUi(self) 
self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas = MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas,", ": <EMAIL> ***************************************************************************/ /*************************************************************************** * * * This program is free software; you", "'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize, label=label) else: self.net.plane(g, label=label) else: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked():", "marker=marker, markersize=markersize, label=label) else: self.net.plane(g, label=label) else: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): self.net.line(g, marker=marker,", "qgis_qhull_fails: from .stereogrid_workaround import StereoGrid as StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui'))", "self.checkLabels.isChecked() else None # contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox,", "QT5 matplotlib.use('Qt5Agg') import numpy as np import matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg import", "Foundation; either version 2 of the License, or * * (at your option)", "* # qhull workaroud import platform qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround", "up the user interface from Designer. 
# After setupUI you can access any", "(as well as a FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None): # fig, self.axes =", "from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from", "self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) # plot", "either version 2 of the License, or * * (at your option) any", "if qgis_qhull_fails: kwargs = {'cmap': 'Greys', 'zorder': 1} d = StereoGridQGIS(g, sigma=sigma) mn", "'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if", "by <NAME> email : <EMAIL> ***************************************************************************/ /*************************************************************************** * * * This program is", "0.8 * ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label) # principal eigf", "if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize, label=label) else:", "0.01) # s = np.sin(2 * np.pi * t) # self.axes.plot(t, s) self.net", "idx, layer in self.data_layers[::-1]: # plot in right order if layer.selectedFeatureCount(): features =", "parent=None): # fig, self.axes = plt.subplots() # t = np.arange(0.0, 3.0, 0.01) #", "f.attribute('inc')) for f in features], layer.name()) label = repr(g) if self.checkLabels.isChecked() else None", "data Group if layer._is_planar: g = Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name())", "= self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, 
eigenfols=eigf, eigenlins=eigl) # plot data markersize = self.opt(idx,", "__init__(self, parent=None): # fig, self.axes = plt.subplots() # t = np.arange(0.0, 3.0, 0.01)", "self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\"", "* ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label) # principal eigf =", "sure that we are using QT5 matplotlib.use('Qt5Agg') import numpy as np import matplotlib.pyplot", "layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize, label=label)", "a QWidget (as well as a FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None): # fig,", "# Make sure that we are using QT5 matplotlib.use('Qt5Agg') import numpy as np", "cbaxes = self.net.fig.add_axes([0.1, ab[1] + 0.1 * ab[3], 0.03, 0.8 * ab[3]]) cb", "General Public License as published by * * the Free Software Foundation; either", "Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03 git sha : $Format:%H$ copyright :", "1e-8 legend = True if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang,", "self.canvas.net def opt(self, index, type, name): return self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid =", "as np import matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from", "structural data into QGIS Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03", "PyQt5.QtCore import Qt from 
qgis.core import * import matplotlib # Make sure that", "3.0, 0.01) # s = np.sin(2 * np.pi * t) # self.axes.plot(t, s)", "**kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails: if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes", "import matplotlib # Make sure that we are using QT5 matplotlib.use('Qt5Agg') import numpy", "Set up the user interface from Designer. # After setupUI you can access", "markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if", "StereoGrid as StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this", "# s = np.sin(2 * np.pi * t) # self.axes.plot(t, s) self.net =", "self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked():", "* This program is free software; you can redistribute it and/or modify *", "you can access any designer object by doing # self.<objectname>, and you can", "data markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar:", "utf-8 -*- \"\"\" /*************************************************************************** ReadSDBDialog A QGIS plugin Read PySDB structural data into", "levels=nlevels, sigma=sigma) if qgis_qhull_fails: if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1]", "it under the terms of the GNU General Public License as published by", "= self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1] + 0.1 * ab[3], 0.03, 0.8 *", "layer._is_planar: g = 
Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) else: g =", "later version. * * * ***************************************************************************/ \"\"\" import os from PyQt5 import uic", "eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) # plot data markersize =", "2018-11-03 git sha : $Format:%H$ copyright : (C) 2018 by <NAME> email :", "ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up", "matplotlib # Make sure that we are using QT5 matplotlib.use('Qt5Agg') import numpy as", "apsg import * # qhull workaroud import platform qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails:", "FORM_CLASS): def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up the", "by doing # self.<objectname>, and you can use autoconnect slots - see #", "nlevels) levels[-1] += 1e-8 legend = True if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if qgis_qhull_fails:", "# After setupUI you can access any designer object by doing # self.<objectname>,", "in right order if layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else: features = layer.getFeatures() #", "self.data_layers = [] self.canvas = MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net", "-*- \"\"\" /*************************************************************************** ReadSDBDialog A QGIS plugin Read PySDB structural data into QGIS", "features = layer.getSelectedFeatures() else: features = layer.getFeatures() # Create data Group if layer._is_planar:", "= self.opt(idx, QtWidgets.QCheckBox, 
'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) #", "self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas = MyMplCanvas(self) self.toolbar = NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar)", "= self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs = {'cmap': 'Greys', 'zorder': 1} d", "else: g = Group([Lin(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) label = repr(g)", "matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT", "def plotnet(self): self.net.grid = self.checkGrid.isChecked() self.net.cla() for idx, layer in self.data_layers[::-1]: # plot", "version. * * * ***************************************************************************/ \"\"\" import os from PyQt5 import uic from", "self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox,", "self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS):", "# contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma =", "def __init__(self, parent=None): # fig, self.axes = plt.subplots() # t = np.arange(0.0, 3.0,", "if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1] + 0.1 * ab[3],", "self.axes = plt.subplots() # t = np.arange(0.0, 3.0, 0.01) # 
s = np.sin(2", "s) self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog,", "= np.arange(0.0, 3.0, 0.01) # s = np.sin(2 * np.pi * t) #", "matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from apsg", "t) # self.axes.plot(t, s) self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)", "see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas = MyMplCanvas(self)", "FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from apsg import *", "= uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a QWidget (as well", "eigenfols=eigf, eigenlins=eigl) # plot data markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker = self.opt(idx,", "self.data_layers[::-1]: # plot in right order if layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else: features", "else: features = layer.getFeatures() # Create data Group if layer._is_planar: g = Group([Fol(f.attribute('azi'),", "is a QWidget (as well as a FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None): #", "self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) # plot data markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker =", "Designer. 
# After setupUI you can access any designer object by doing #", "as plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as", "import os from PyQt5 import uic from PyQt5 import QtWidgets from PyQt5.QtCore import", "= NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def opt(self, index, type, name):", "# t = np.arange(0.0, 3.0, 0.01) # s = np.sin(2 * np.pi *", "* t) # self.axes.plot(t, s) self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding,", "return self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid = self.checkGrid.isChecked() self.net.cla() for idx, layer in", "'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a QWidget (as well as a FigureCanvasAgg,", "you can use autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet)", "qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround import StereoGrid as StereoGridQGIS FORM_CLASS, _", "_ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a QWidget (as", "np.sin(2 * np.pi * t) # self.axes.plot(t, s) self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig)", "<EMAIL> ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can", "import numpy as np import matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as", 
"matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from apsg import * # qhull workaroud import", "self.net.grid = self.checkGrid.isChecked() self.net.cla() for idx, layer in self.data_layers[::-1]: # plot in right", "cb = self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label) # principal eigf = self.opt(idx, QtWidgets.QCheckBox,", "using QT5 matplotlib.use('Qt5Agg') import numpy as np import matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg", "from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from apsg import * # qhull workaroud", "platform qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround import StereoGrid as StereoGridQGIS FORM_CLASS,", "marker = self.opt(idx, QtWidgets.QComboBox, 'comboStyle').currentText() if layer._is_planar: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx,", "<gh_stars>1-10 # -*- coding: utf-8 -*- \"\"\" /*************************************************************************** ReadSDBDialog A QGIS plugin Read", "sigma=sigma) if qgis_qhull_fails: if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1] +", "the Free Software Foundation; either version 2 of the License, or * *", "* it under the terms of the GNU General Public License as published", "well as a FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None): # fig, self.axes = plt.subplots()", "= self.checkGrid.isChecked() self.net.cla() for idx, layer in self.data_layers[::-1]: # plot in right order", "interface from Designer. 
# After setupUI you can access any designer object by", "Make sure that we are using QT5 matplotlib.use('Qt5Agg') import numpy as np import", "plugin Read PySDB structural data into QGIS Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ -------------------", ": (C) 2018 by <NAME> email : <EMAIL> ***************************************************************************/ /*************************************************************************** * * *", "class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.).\"\"\"", "repr(g) if self.checkLabels.isChecked() else None # contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels =", "QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if", "slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas", "any later version. * * * ***************************************************************************/ \"\"\" import os from PyQt5 import", "the user interface from Designer. # After setupUI you can access any designer", "os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a QWidget (as well as a", "License, or * * (at your option) any later version. 
* * *", "'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) # plot data markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value() marker", "MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.).\"\"\" def", "coding: utf-8 -*- \"\"\" /*************************************************************************** ReadSDBDialog A QGIS plugin Read PySDB structural data", "# fig, self.axes = plt.subplots() # t = np.arange(0.0, 3.0, 0.01) # s", "d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails: if legend: ab =", "**kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if qgis_qhull_fails: cs", "QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent,", "sigma=sigma) else: if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels,", "else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs)", "'zorder': 1} d = StereoGridQGIS(g, sigma=sigma) mn = d.values.min() mx = d.values.max() levels", "layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else: features = layer.getFeatures() # Create data Group if", "this is a QWidget (as well as a FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None):", "http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) 
self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas = MyMplCanvas(self) self.toolbar =", "published by * * the Free Software Foundation; either version 2 of the", "fig, self.axes = plt.subplots() # t = np.arange(0.0, 3.0, 0.01) # s =", "QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) # plot data markersize = self.opt(idx, QtWidgets.QSpinBox, 'spinSize').value()", "FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog,", "setupUI you can access any designer object by doing # self.<objectname>, and you", "\"\"\"Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.).\"\"\" def __init__(self,", "of the GNU General Public License as published by * * the Free", "cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels,", "qhull workaroud import platform qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround import StereoGrid", "PySDB structural data into QGIS Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin :", "__init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up the user interface", "'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs = {'cmap': 'Greys', 'zorder':", "QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, 
markersize=markersize, label=label) else: self.net.plane(g, label=label)", "readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up the user interface from", "if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang,", "if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else:", "= plt.subplots() # t = np.arange(0.0, 3.0, 0.01) # s = np.sin(2 *", "type, name): return self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid = self.checkGrid.isChecked() self.net.cla() for idx,", "mx = d.values.max() levels = np.linspace(mn, mx, nlevels) levels[-1] += 1e-8 legend =", "= self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails: if legend:", "as FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from apsg import * #", "# plot in right order if layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else: features =", "designer object by doing # self.<objectname>, and you can use autoconnect slots -", "import * # qhull workaroud import platform qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails: from", "QGIS plugin Read PySDB structural data into QGIS Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/", "2018 by <NAME> email : <EMAIL> ***************************************************************************/ /*************************************************************************** * * * This program", "workaroud 
import platform qgis_qhull_fails = platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround import StereoGrid as", "g = Group([Lin(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) label = repr(g) if", "1} d = StereoGridQGIS(g, sigma=sigma) mn = d.values.min() mx = d.values.max() levels =", "= self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs =", "self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs = {'cmap':", "can access any designer object by doing # self.<objectname>, and you can use", "from PyQt5 import QtWidgets from PyQt5.QtCore import Qt from qgis.core import * import", "ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes) if label: cb.ax.set_title(label) # principal eigf = self.opt(idx,", "markersize=markersize, label=label) else: self.net.plane(g, label=label) else: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): self.net.line(g, marker=marker, markersize=markersize,", "from apsg import * # qhull workaroud import platform qgis_qhull_fails = platform.platform().startswith('Linux') if", "mn = d.values.min() mx = d.values.max() levels = np.linspace(mn, mx, nlevels) levels[-1] +=", "in features], layer.name()) label = repr(g) if self.checkLabels.isChecked() else None # contours if", "object by doing # self.<objectname>, and you can use autoconnect slots - see", "git sha : $Format:%H$ copyright : (C) 2018 by <NAME> email : <EMAIL>", "redistribute it and/or modify * * it under the terms of the GNU", "os from PyQt5 import uic from PyQt5 import QtWidgets from PyQt5.QtCore import Qt", "\"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up the user interface from Designer. 
#", "- see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers = [] self.canvas =", "np.arange(0.0, 3.0, 0.01) # s = np.sin(2 * np.pi * t) # self.axes.plot(t,", "StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a", "np.linspace(mn, mx, nlevels) levels[-1] += 1e-8 legend = True if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked():", "as a FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None): # fig, self.axes = plt.subplots() #", "sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs = {'cmap': 'Greys', 'zorder': 1}", "# Set up the user interface from Designer. # After setupUI you can", "f.attribute('inc')) for f in features], layer.name()) else: g = Group([Lin(f.attribute('azi'), f.attribute('inc')) for f", "-*- coding: utf-8 -*- \"\"\" /*************************************************************************** ReadSDBDialog A QGIS plugin Read PySDB structural", "StereoGridQGIS(g, sigma=sigma) mn = d.values.min() mx = d.values.max() levels = np.linspace(mn, mx, nlevels)", "you can redistribute it and/or modify * * it under the terms of", "that we are using QT5 matplotlib.use('Qt5Agg') import numpy as np import matplotlib.pyplot as", "if self.checkLabels.isChecked() else None # contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx,", "are using QT5 matplotlib.use('Qt5Agg') import numpy as np import matplotlib.pyplot as plt from", "* (at your option) any later version. 
* * * ***************************************************************************/ \"\"\" import", "etc.).\"\"\" def __init__(self, parent=None): # fig, self.axes = plt.subplots() # t = np.arange(0.0,", "self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def opt(self, index, type, name): return self.tabWidget.widget(index).findChild(type,", "plot in right order if layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else: features = layer.getFeatures()", "self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value()", "self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else: if qgis_qhull_fails: cs =", "# -*- coding: utf-8 -*- \"\"\" /*************************************************************************** ReadSDBDialog A QGIS plugin Read PySDB", "any designer object by doing # self.<objectname>, and you can use autoconnect slots", "free software; you can redistribute it and/or modify * * it under the", "= d.values.min() mx = d.values.max() levels = np.linspace(mn, mx, nlevels) levels[-1] += 1e-8", "FORM_CLASS, _ = uic.loadUiType(os.path.join( os.path.dirname(__file__), 'ui/readsdb_plot.ui')) class MyMplCanvas(FigureCanvas): \"\"\"Ultimately, this is a QWidget", "and you can use autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self)", "Group([Lin(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) label = repr(g) if self.checkLabels.isChecked() else", "* * This program is free software; you can redistribute it and/or modify", "import Qt from qgis.core import * import matplotlib # Make sure that we", "layer in self.data_layers[::-1]: # 
plot in right order if layer.selectedFeatureCount(): features = layer.getSelectedFeatures()", "import matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg import", "\"\"\" import os from PyQt5 import uic from PyQt5 import QtWidgets from PyQt5.QtCore", "Qt.WindowStaysOnTopHint) # Set up the user interface from Designer. # After setupUI you", "by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03 git sha : $Format:%H$ copyright", "d = StereoGridQGIS(g, sigma=sigma) mn = d.values.min() mx = d.values.max() levels = np.linspace(mn,", "self.net = self.canvas.net def opt(self, index, type, name): return self.tabWidget.widget(index).findChild(type, name) def plotnet(self):", "access any designer object by doing # self.<objectname>, and you can use autoconnect", "QGIS Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/ ------------------- begin : 2018-11-03 git sha :", "order if layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else: features = layer.getFeatures() # Create data", "ab[1] + 0.1 * ab[3], 0.03, 0.8 * ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes)", "self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): if self.opt(idx, QtWidgets.QCheckBox, 'checkAsPoles').isChecked(): self.net.pole(g, marker=marker, markersize=markersize, label=label) else: self.net.plane(g,", "a FigureCanvasAgg, etc.).\"\"\" def __init__(self, parent=None): # fig, self.axes = plt.subplots() # t", "self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up the user interface from Designer. # After setupUI", "terms of the GNU General Public License as published by * * the", "if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox,", "user interface from Designer. 
# After setupUI you can access any designer object", "features], layer.name()) else: g = Group([Lin(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) label", "features], layer.name()) label = repr(g) if self.checkLabels.isChecked() else None # contours if self.opt(idx,", "platform.platform().startswith('Linux') if qgis_qhull_fails: from .stereogrid_workaround import StereoGrid as StereoGridQGIS FORM_CLASS, _ = uic.loadUiType(os.path.join(", "from PyQt5.QtCore import Qt from qgis.core import * import matplotlib # Make sure", "+ 0.1 * ab[3], 0.03, 0.8 * ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes) if", "def __init__(self, readsdb, parent=None): \"\"\"Constructor.\"\"\" super(ReadSDBPlotDialog, self).__init__(parent, Qt.WindowStaysOnTopHint) # Set up the user", "the GNU General Public License as published by * * the Free Software", "'Greys', 'zorder': 1} d = StereoGridQGIS(g, sigma=sigma) mn = d.values.min() mx = d.values.max()", "use autoconnect slots - see # http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html # #widgets-and-dialogs-with-auto-connect self.setupUi(self) self.pushApply.clicked.connect(self.plotnet) self.data_layers =", "= self.canvas.net def opt(self, index, type, name): return self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid", "Qt from qgis.core import * import matplotlib # Make sure that we are", "qgis_qhull_fails: if legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1] + 0.1 *", "QtWidgets from PyQt5.QtCore import Qt from qgis.core import * import matplotlib # Make", "it and/or modify * * it under the terms of the GNU General", "legend: ab = self.net.fig.axes[self.net.active].get_position().bounds cbaxes = self.net.fig.add_axes([0.1, ab[1] + 0.1 * ab[3], 0.03,", "sha : $Format:%H$ copyright : (C) 2018 by <NAME> email : <EMAIL> ***************************************************************************/", "* np.pi * 
t) # self.axes.plot(t, s) self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent)", "as published by * * the Free Software Foundation; either version 2 of", "self.opt(idx, QtWidgets.QCheckBox, 'checkEigLines').isChecked() self.net.tensor(g.ortensor, eigenfols=eigf, eigenlins=eigl) # plot data markersize = self.opt(idx, QtWidgets.QSpinBox,", "label: cb.ax.set_title(label) # principal eigf = self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl = self.opt(idx, QtWidgets.QCheckBox,", "your option) any later version. * * * ***************************************************************************/ \"\"\" import os from", "+= 1e-8 legend = True if self.opt(idx, QtWidgets.QCheckBox, 'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs =", "self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def opt(self, index, type, name): return self.tabWidget.widget(index).findChild(type, name)", "# self.axes.plot(t, s) self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self)", "plotnet(self): self.net.grid = self.checkGrid.isChecked() self.net.cla() for idx, layer in self.data_layers[::-1]: # plot in", "Public License as published by * * the Free Software Foundation; either version", "= layer.getSelectedFeatures() else: features = layer.getFeatures() # Create data Group if layer._is_planar: g", "layer.getSelectedFeatures() else: features = layer.getFeatures() # Create data Group if layer._is_planar: g =", "self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, colors=\"k\") else: self.net.contourf(StereoGrid(g), levels=nlevels, sigma=sigma) else:", "self.opt(idx, QtWidgets.QCheckBox, 
'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values, levels, **kwargs) self.net.fig.axes[self.net.active].tricontour(d.triang, d.values,", "import QtWidgets from PyQt5.QtCore import Qt from qgis.core import * import matplotlib #", "np.pi * t) # self.axes.plot(t, s) self.net = StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self,", "NavigationToolbar(self.canvas, self) self.mplLayout.addWidget(self.canvas) self.mplLayout.addWidget(self.toolbar) self.net = self.canvas.net def opt(self, index, type, name): return", "and/or modify * * it under the terms of the GNU General Public", "under the terms of the GNU General Public License as published by *", "option) any later version. * * * ***************************************************************************/ \"\"\" import os from PyQt5", "* import matplotlib # Make sure that we are using QT5 matplotlib.use('Qt5Agg') import", "begin : 2018-11-03 git sha : $Format:%H$ copyright : (C) 2018 by <NAME>", "Group if layer._is_planar: g = Group([Fol(f.attribute('azi'), f.attribute('inc')) for f in features], layer.name()) else:", "0.1 * ab[3], 0.03, 0.8 * ab[3]]) cb = self.net.fig.colorbar(cs, cax=cbaxes) if label:", "After setupUI you can access any designer object by doing # self.<objectname>, and", "self.tabWidget.widget(index).findChild(type, name) def plotnet(self): self.net.grid = self.checkGrid.isChecked() self.net.cla() for idx, layer in self.data_layers[::-1]:", "numpy as np import matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas", "import * import matplotlib # Make sure that we are using QT5 matplotlib.use('Qt5Agg')", "None # contours if self.opt(idx, QtWidgets.QCheckBox, 'checkContours').isChecked(): nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma", "legend = True if self.opt(idx, QtWidgets.QCheckBox, 
'checkContoursFilled').isChecked(): if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontourf(d.triang, d.values,", "qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma) if qgis_qhull_fails:", "nlevels = self.opt(idx, QtWidgets.QSpinBox, 'spinLevels').value() sigma = self.opt(idx, QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs", "* * it under the terms of the GNU General Public License as", "self.checkGrid.isChecked() self.net.cla() for idx, layer in self.data_layers[::-1]: # plot in right order if", "np import matplotlib.pyplot as plt from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt5agg", "QtWidgets.QDoubleSpinBox, 'spinSigma').value() if qgis_qhull_fails: kwargs = {'cmap': 'Greys', 'zorder': 1} d = StereoGridQGIS(g,", "right order if layer.selectedFeatureCount(): features = layer.getSelectedFeatures() else: features = layer.getFeatures() # Create", "label=label) else: self.net.plane(g, label=label) else: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): self.net.line(g, marker=marker, markersize=markersize, label=label)", "FigureCanvas from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar from apsg import * # qhull", "cax=cbaxes) if label: cb.ax.set_title(label) # principal eigf = self.opt(idx, QtWidgets.QCheckBox, 'checkEigPlanes').isChecked() eigl =", "else: if qgis_qhull_fails: cs = self.net.fig.axes[self.net.active].tricontour(d.triang, d.values, levels, **kwargs) else: self.net.contour(StereoGrid(g), levels=nlevels, sigma=sigma)", "self.net.pole(g, marker=marker, markersize=markersize, label=label) else: self.net.plane(g, label=label) else: if self.opt(idx, QtWidgets.QCheckBox, 'checkShowData').isChecked(): self.net.line(g,", "* * the Free Software Foundation; either version 2 
of the License, or", "from Designer. # After setupUI you can access any designer object by doing", "StereoNet() FigureCanvas.__init__(self, self.net.fig) self.setParent(parent) FigureCanvas.setSizePolicy(self, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding) FigureCanvas.updateGeometry(self) class ReadSDBPlotDialog(QtWidgets.QDialog, FORM_CLASS): def __init__(self,", "* * * ***************************************************************************/ \"\"\" import os from PyQt5 import uic from PyQt5", "GNU General Public License as published by * * the Free Software Foundation;" ]
[ "model, width, height): for i, image in enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda() out", "[256, 512, 1024, 2048]) return model def main(): if (not os.path.exists(args.output)): print(\"Output directory", "model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img = cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader,", "import cv2 import os import torch.nn.parallel import modules, net, resnet, densenet, senet import", "parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args() def", "= resnet.resnet50(pretrained = True) Encoder = modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048, block_channel =", "test(nyu2_loader, model, img.shape[1], img.shape[0]) def test(nyu2_loader, model, width, height): for i, image in", "in enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda() out = model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix", "print(vol) print(\"unit: cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\")", "import matplotlib.image import matplotlib.pyplot as plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to", "= open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close() get_mask(out_grey, args.json, args.output)", "args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet): if is_resnet: original_model = 
resnet.resnet50(pretrained = True) Encoder", "= net.model(Encoder, num_features=2208, block_channel = [192, 384, 1056, 2208]) if is_senet: original_model =", "512, 1024, 2048]) if is_densenet: original_model = densenet.densenet161(pretrained=True) Encoder = modules.E_densenet(original_model) model =", "out = model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix = out.min() out", "is_resnet: original_model = resnet.resnet50(pretrained = True) Encoder = modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048,", "384, 1056, 2208]) if is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model =", "max_pix = out.max() min_pix = out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output,", "input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args()", "resnet.resnet50(pretrained = True) Encoder = modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256,", "loaddata import pdb import argparse from volume import get_volume from mask import get_mask", "Encoder = modules.E_densenet(original_model) model = net.model(Encoder, num_features=2208, block_channel = [192, 384, 1056, 2208])", "block_channel = [256, 512, 1024, 2048]) if is_densenet: original_model = densenet.densenet161(pretrained=True) Encoder =", "= [256, 512, 1024, 2048]) return model def main(): if (not os.path.exists(args.output)): print(\"Output", "model.eval() print img = cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0]) def", "1024, 2048]) if is_densenet: original_model = densenet.densenet161(pretrained=True) Encoder = 
modules.E_densenet(original_model) model = net.model(Encoder,", "mask import get_mask import matplotlib.image import matplotlib.pyplot as plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img',", "metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet): if is_resnet: original_model =", "import matplotlib.pyplot as plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json',", "[256, 512, 1024, 2048]) if is_densenet: original_model = densenet.densenet161(pretrained=True) Encoder = modules.E_densenet(original_model) model", "define_model(is_resnet=False, is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img = cv2.imread(args.img) nyu2_loader", "out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey,", "= out.max() min_pix = out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out)", "\"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close() get_mask(out_grey, args.json, args.output) if __name__ ==", "out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit:", "is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img = 
cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img)", "original_model = densenet.densenet161(pretrained=True) Encoder = modules.E_densenet(original_model) model = net.model(Encoder, num_features=2208, block_channel = [192,", "= get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\")", "import torch import cv2 import os import torch.nn.parallel import modules, net, resnet, densenet,", "out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close() get_mask(out_grey, args.json, args.output) if __name__ == '__main__': main()", "exist! Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval()", "import pdb import argparse from volume import get_volume from mask import get_mask import", "matplotlib.pyplot as plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\",", "to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet): if", "get_mask import matplotlib.image import matplotlib.pyplot as plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img", "cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file = open(os.path.join(args.output,", "torch.nn.parallel import modules, net, resnet, densenet, senet import numpy as np import loaddata_demo", 
"modules, net, resnet, densenet, senet import numpy as np import loaddata_demo as loaddata", "img = cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0]) def test(nyu2_loader, model,", "get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol))", "vol = get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"), \"w\")", "model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix = out.min() out = (out-min_pix)/(max_pix-min_pix)*255", "= out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix = out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out =", "original_model = senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256,", "metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir", "model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) if is_densenet: original_model", "nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0]) def test(nyu2_loader, model, width, height): for", "densenet.densenet161(pretrained=True) Encoder = modules.E_densenet(original_model) model = net.model(Encoder, num_features=2208, block_channel = [192, 384, 1056,", "from mask import get_mask import matplotlib.image import matplotlib.pyplot as plt parser = argparse.ArgumentParser(description='KD-network')", "get_volume from mask import get_mask import matplotlib.image import matplotlib.pyplot as plt 
parser =", "to output') args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet): if is_resnet: original_model = resnet.resnet50(pretrained =", "import argparse import torch import cv2 import os import torch.nn.parallel import modules, net,", "define_model(is_resnet, is_densenet, is_senet): if is_resnet: original_model = resnet.resnet50(pretrained = True) Encoder = modules.E_resnet(original_model)", "input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet): if is_resnet:", "print(\"unit: cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close()", "\"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close() get_mask(out_grey, args.json, args.output) if __name__", "torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img = cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1],", "for i, image in enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda() out = model(image) out", "Encoder = modules.E_senet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048])", "if is_densenet: original_model = densenet.densenet161(pretrained=True) Encoder = modules.E_densenet(original_model) model = net.model(Encoder, num_features=2208, block_channel", "to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output')", "argparse import torch import cv2 import os import torch.nn.parallel import modules, 
net, resnet,", "= net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) if is_densenet: original_model =", "i, image in enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda() out = model(image) out =", "img.shape[1], img.shape[0]) def test(nyu2_loader, model, width, height): for i, image in enumerate(nyu2_loader): image", "import loaddata_demo as loaddata import pdb import argparse from volume import get_volume from", "parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet): if is_resnet: original_model", "help='dir to output') args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet): if is_resnet: original_model = resnet.resnet50(pretrained", "pdb import argparse from volume import get_volume from mask import get_mask import matplotlib.image", "num_features=2048, block_channel = [256, 512, 1024, 2048]) if is_densenet: original_model = densenet.densenet161(pretrained=True) Encoder", "import numpy as np import loaddata_demo as loaddata import pdb import argparse from", "import get_mask import matplotlib.image import matplotlib.pyplot as plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\",", "densenet, senet import numpy as np import loaddata_demo as loaddata import pdb import", "loaddata_demo as loaddata import pdb import argparse from volume import get_volume from mask", "modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) if is_densenet:", "2208]) if is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model = net.model(Encoder, num_features=2048,", "= densenet.densenet161(pretrained=True) Encoder = modules.E_densenet(original_model) model = net.model(Encoder, num_features=2208, 
block_channel = [192, 384,", "= argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to input')", "volatile=True).cuda() out = model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix = out.min()", "as loaddata import pdb import argparse from volume import get_volume from mask import", "argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to input') parser.add_argument('--output',", "import torch.nn.parallel import modules, net, resnet, densenet, senet import numpy as np import", "print img = cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0]) def test(nyu2_loader,", "is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img = cv2.imread(args.img) nyu2_loader =", "plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file", "model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img = cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0])", "= model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix = out.min() out =", "= (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, 
\"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color =", "doesn't exist! Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet'))", "= [256, 512, 1024, 2048]) if is_densenet: original_model = densenet.densenet161(pretrained=True) Encoder = modules.E_densenet(original_model)", "cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file =", "output') args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet): if is_resnet: original_model = resnet.resnet50(pretrained = True)", "image in enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda() out = model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy()", "model = define_model(is_resnet=False, is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img =", "os.path.exists(args.output)): print(\"Output directory doesn't exist! 
Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False, is_senet=True) model", "= modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) if", "cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color)", "parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to", "cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close() get_mask(out_grey,", "(out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey,", "cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0]) def test(nyu2_loader, model, width, height):", "volume import get_volume from mask import get_mask import matplotlib.image import matplotlib.pyplot as plt", "original_model = resnet.resnet50(pretrained = True) Encoder = modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048, block_channel", "height): for i, image in enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda() out = model(image)", "def define_model(is_resnet, is_densenet, is_senet): if is_resnet: original_model = resnet.resnet50(pretrained = True) 
Encoder =", "if is_resnet: original_model = resnet.resnet50(pretrained = True) Encoder = modules.E_resnet(original_model) model = net.model(Encoder,", "if is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model = net.model(Encoder, num_features=2048, block_channel", "help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to", "image = torch.autograd.Variable(image, volatile=True).cuda() out = model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max()", "resnet, densenet, senet import numpy as np import loaddata_demo as loaddata import pdb", "is_densenet, is_senet): if is_resnet: original_model = resnet.resnet50(pretrained = True) Encoder = modules.E_resnet(original_model) model", "senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024,", "cv2 import os import torch.nn.parallel import modules, net, resnet, densenet, senet import numpy", "out = (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color", "np import loaddata_demo as loaddata import pdb import argparse from volume import get_volume", "os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img", "net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) return model def main(): if", "out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) 
cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0)", "block_channel = [192, 384, 1056, 2208]) if is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder =", "model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) return model def", "\"out_color.png\"),out_color) vol = get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"),", "import get_volume from mask import get_mask import matplotlib.image import matplotlib.pyplot as plt parser", "model, img.shape[1], img.shape[0]) def test(nyu2_loader, model, width, height): for i, image in enumerate(nyu2_loader):", "\"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol =", "torch.autograd.Variable(image, volatile=True).cuda() out = model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix =", "enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda() out = model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix =", "net.model(Encoder, num_features=2208, block_channel = [192, 384, 1056, 2208]) if is_senet: original_model = senet.senet154(pretrained='imagenet')", "= define_model(is_resnet=False, is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img = cv2.imread(args.img)", "parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\",", "from volume 
import get_volume from mask import get_mask import matplotlib.image import matplotlib.pyplot as", "block_channel = [256, 512, 1024, 2048]) return model def main(): if (not os.path.exists(args.output)):", "= True) Encoder = modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512,", "= modules.E_senet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) return", "Encoder = modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048])", "modules.E_senet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) return model", "loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0]) def test(nyu2_loader, model, width, height): for i, image", "test(nyu2_loader, model, width, height): for i, image in enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda()", "argparse from volume import get_volume from mask import get_mask import matplotlib.image import matplotlib.pyplot", "= cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey, args.json)", "= net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) return model def main():", "return model def main(): if (not os.path.exists(args.output)): print(\"Output directory doesn't exist! Creating...\") os.makedirs(args.output)", "(not os.path.exists(args.output)): print(\"Output directory doesn't exist! 
Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False, is_senet=True)", "out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET)", "is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model = net.model(Encoder, num_features=2048, block_channel =", "main(): if (not os.path.exists(args.output)): print(\"Output directory doesn't exist! Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False,", "= torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print img = cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model,", "width, height): for i, image in enumerate(nyu2_loader): image = torch.autograd.Variable(image, volatile=True).cuda() out =", "out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix = out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC)", "metavar='DIR',default=\"./input/test.json\", help='json file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args() def define_model(is_resnet,", "file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args() def define_model(is_resnet, is_densenet, is_senet):", "args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\")", "is_densenet: original_model = densenet.densenet161(pretrained=True) Encoder = modules.E_densenet(original_model) model = 
net.model(Encoder, num_features=2208, block_channel =", "import modules, net, resnet, densenet, senet import numpy as np import loaddata_demo as", "= senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512,", "matplotlib.image import matplotlib.pyplot as plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input')", "model def main(): if (not os.path.exists(args.output)): print(\"Output directory doesn't exist! Creating...\") os.makedirs(args.output) model", "cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file", "net, resnet, densenet, senet import numpy as np import loaddata_demo as loaddata import", "def main(): if (not os.path.exists(args.output)): print(\"Output directory doesn't exist! 
Creating...\") os.makedirs(args.output) model =", "print(\"Volume:\") print(vol) print(\"unit: cm^3\") out_file = open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit:", "import os import torch.nn.parallel import modules, net, resnet, densenet, senet import numpy as", "1024, 2048]) return model def main(): if (not os.path.exists(args.output)): print(\"Output directory doesn't exist!", "modules.E_densenet(original_model) model = net.model(Encoder, num_features=2208, block_channel = [192, 384, 1056, 2208]) if is_senet:", "model = net.model(Encoder, num_features=2208, block_channel = [192, 384, 1056, 2208]) if is_senet: original_model", "1056, 2208]) if is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model = net.model(Encoder,", "2048]) return model def main(): if (not os.path.exists(args.output)): print(\"Output directory doesn't exist! Creating...\")", "num_features=2208, block_channel = [192, 384, 1056, 2208]) if is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder", "if (not os.path.exists(args.output)): print(\"Output directory doesn't exist! 
Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False,", "numpy as np import loaddata_demo as loaddata import pdb import argparse from volume", "img.shape[0]) def test(nyu2_loader, model, width, height): for i, image in enumerate(nyu2_loader): image =", "= cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey, args.json) print(\"Volume:\") print(vol) print(\"unit: cm^3\")", "= loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0]) def test(nyu2_loader, model, width, height): for i,", "out_file = open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close() get_mask(out_grey, args.json,", "import argparse from volume import get_volume from mask import get_mask import matplotlib.image import", "cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol", "torch import cv2 import os import torch.nn.parallel import modules, net, resnet, densenet, senet", "as plt parser = argparse.ArgumentParser(description='KD-network') parser.add_argument('--img', metavar='DIR',default=\"./input/test.jpg\", help='img to input') parser.add_argument('--json', metavar='DIR',default=\"./input/test.json\", help='json", "True) Encoder = modules.E_resnet(original_model) model = net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024,", "min_pix = out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey =", "2048]) if is_densenet: original_model = densenet.densenet161(pretrained=True) 
Encoder = modules.E_densenet(original_model) model = net.model(Encoder, num_features=2208,", "cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey, args.json) print(\"Volume:\")", "def test(nyu2_loader, model, width, height): for i, image in enumerate(nyu2_loader): image = torch.autograd.Variable(image,", "out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix = out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out", "= out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output,", "open(os.path.join(args.output, \"out.txt\"), \"w\") out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close() get_mask(out_grey, args.json, args.output) if", "net.model(Encoder, num_features=2048, block_channel = [256, 512, 1024, 2048]) if is_densenet: original_model = densenet.densenet161(pretrained=True)", "Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda() model.load_state_dict(torch.load('./pretrained_model/model_senet')) model.eval() print", "help='json file to input') parser.add_argument('--output', metavar='DIR',default=\"./output\", help='dir to output') args=parser.parse_args() def define_model(is_resnet, is_densenet,", "as np import loaddata_demo as loaddata import pdb import argparse from volume import", "512, 1024, 2048]) return model def main(): if (not os.path.exists(args.output)): print(\"Output directory doesn't", "senet import numpy as np import loaddata_demo as loaddata import pdb import argparse", "os import torch.nn.parallel import modules, 
net, resnet, densenet, senet import numpy as np", "is_senet): if is_resnet: original_model = resnet.resnet50(pretrained = True) Encoder = modules.E_resnet(original_model) model =", "num_features=2048, block_channel = [256, 512, 1024, 2048]) return model def main(): if (not", "\"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output, \"out_color.png\"),out_color) vol = get_volume(out_grey, args.json) print(\"Volume:\") print(vol)", "out.max() min_pix = out.min() out = (out-min_pix)/(max_pix-min_pix)*255 out = cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey", "= cv2.imread(args.img) nyu2_loader = loaddata.readNyu2(args.img) test(nyu2_loader, model, img.shape[1], img.shape[0]) def test(nyu2_loader, model, width,", "= torch.autograd.Variable(image, volatile=True).cuda() out = model(image) out = out.view(out.size(2),out.size(3)).data.cpu().numpy() max_pix = out.max() min_pix", "print(\"Output directory doesn't exist! Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False, is_senet=True) model =", "= [192, 384, 1056, 2208]) if is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model)", "[192, 384, 1056, 2208]) if is_senet: original_model = senet.senet154(pretrained='imagenet') Encoder = modules.E_senet(original_model) model", "= modules.E_densenet(original_model) model = net.model(Encoder, num_features=2208, block_channel = [192, 384, 1056, 2208]) if", "= cv2.resize(out,(width,height),interpolation=cv2.INTER_CUBIC) cv2.imwrite(os.path.join(args.output, \"out_grey.png\"),out) out_grey = cv2.imread(os.path.join(args.output, \"out_grey.png\"),0) out_color = cv2.applyColorMap(out_grey, cv2.COLORMAP_JET) cv2.imwrite(os.path.join(args.output,", "directory doesn't exist! 
Creating...\") os.makedirs(args.output) model = define_model(is_resnet=False, is_densenet=False, is_senet=True) model = torch.nn.DataParallel(model).cuda()", "out_file.write(\"Volume:\\n\") out_file.write(str(vol)) out_file.write(\"\\n\") out_file.write(\"unit: cm^3\") out_file.close() get_mask(out_grey, args.json, args.output) if __name__ == '__main__':" ]
[ "fake_msg: group_forward_msg = await fake_forward_msg(at, event.group_id, fake_msg) await send_group_forward_msg(event.group_id, group_forward_msg) else: await fake.finish(\"参数有误~\")", "for i in msg: if i.type == 'text': if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i)", "from utils.message_builder import fake_forward_msg from utils.utils import send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"}, priority=5,", "Message = CommandArg()): at = [] msg = [] fake_msg = [] for", "if i.type == 'text': if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at and fake_msg:", "fake = on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async def _(event: GroupMessageEvent, args: Message", "args: if i.type == 'at': if i.data[\"qq\"] == 'all': continue else: at.append(i.data[\"qq\"]) else:", "else: fake_msg.append(i) if at and fake_msg: group_forward_msg = await fake_forward_msg(at, event.group_id, fake_msg) await", "import fake_forward_msg from utils.utils import send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle()", "i.type == 'at': if i.data[\"qq\"] == 'all': continue else: at.append(i.data[\"qq\"]) else: msg.append(i) for", "on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async def _(event: GroupMessageEvent, args: Message = CommandArg()):", "block=True) @fake.handle() async def _(event: GroupMessageEvent, args: Message = CommandArg()): at = []", "MessageSegment from nonebot.params import CommandArg from utils.message_builder import fake_forward_msg from utils.utils import send_group_forward_msg", "'all': continue else: at.append(i.data[\"qq\"]) else: msg.append(i) for i in msg: if i.type ==", "else: msg.append(i) for i in msg: if i.type == 'text': if i.data[\"text\"].strip(): 
fake_msg.append(MessageSegment.text(i.data[\"text\"].strip()))", "at = [] msg = [] fake_msg = [] for i in args:", "on_command from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment from nonebot.params import CommandArg from utils.message_builder", "[] for i in args: if i.type == 'at': if i.data[\"qq\"] == 'all':", "CommandArg()): at = [] msg = [] fake_msg = [] for i in", "= [] for i in args: if i.type == 'at': if i.data[\"qq\"] ==", "== 'all': continue else: at.append(i.data[\"qq\"]) else: msg.append(i) for i in msg: if i.type", "utils.utils import send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async def _(event:", "= CommandArg()): at = [] msg = [] fake_msg = [] for i", "CommandArg from utils.message_builder import fake_forward_msg from utils.utils import send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"},", "= [] msg = [] fake_msg = [] for i in args: if", "fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at and fake_msg: group_forward_msg = await fake_forward_msg(at, event.group_id, fake_msg)", "if i.type == 'at': if i.data[\"qq\"] == 'all': continue else: at.append(i.data[\"qq\"]) else: msg.append(i)", "from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment from nonebot.params import CommandArg from utils.message_builder import", "i.type == 'text': if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at and fake_msg: group_forward_msg", "and fake_msg: group_forward_msg = await fake_forward_msg(at, event.group_id, fake_msg) await send_group_forward_msg(event.group_id, group_forward_msg) else: await", "import CommandArg from utils.message_builder import fake_forward_msg from utils.utils import send_group_forward_msg fake = on_command('fake',", "at.append(i.data[\"qq\"]) else: msg.append(i) for i in msg: if i.type == 'text': if 
i.data[\"text\"].strip():", "== 'at': if i.data[\"qq\"] == 'all': continue else: at.append(i.data[\"qq\"]) else: msg.append(i) for i", "if i.data[\"qq\"] == 'all': continue else: at.append(i.data[\"qq\"]) else: msg.append(i) for i in msg:", "continue else: at.append(i.data[\"qq\"]) else: msg.append(i) for i in msg: if i.type == 'text':", "i.data[\"qq\"] == 'all': continue else: at.append(i.data[\"qq\"]) else: msg.append(i) for i in msg: if", "_(event: GroupMessageEvent, args: Message = CommandArg()): at = [] msg = [] fake_msg", "from nonebot import on_command from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment from nonebot.params import", "import GroupMessageEvent, Message, MessageSegment from nonebot.params import CommandArg from utils.message_builder import fake_forward_msg from", "aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async def _(event: GroupMessageEvent, args: Message = CommandArg()): at", "for i in args: if i.type == 'at': if i.data[\"qq\"] == 'all': continue", "[] msg = [] fake_msg = [] for i in args: if i.type", "i in args: if i.type == 'at': if i.data[\"qq\"] == 'all': continue else:", "in args: if i.type == 'at': if i.data[\"qq\"] == 'all': continue else: at.append(i.data[\"qq\"])", "nonebot.params import CommandArg from utils.message_builder import fake_forward_msg from utils.utils import send_group_forward_msg fake =", "fake_msg = [] for i in args: if i.type == 'at': if i.data[\"qq\"]", "msg: if i.type == 'text': if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at and", "== 'text': if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at and fake_msg: group_forward_msg =", "import on_command from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment from nonebot.params import CommandArg from", "'text': if i.data[\"text\"].strip(): 
fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at and fake_msg: group_forward_msg = await", "@fake.handle() async def _(event: GroupMessageEvent, args: Message = CommandArg()): at = [] msg", "GroupMessageEvent, Message, MessageSegment from nonebot.params import CommandArg from utils.message_builder import fake_forward_msg from utils.utils", "args: Message = CommandArg()): at = [] msg = [] fake_msg = []", "priority=5, block=True) @fake.handle() async def _(event: GroupMessageEvent, args: Message = CommandArg()): at =", "def _(event: GroupMessageEvent, args: Message = CommandArg()): at = [] msg = []", "utils.message_builder import fake_forward_msg from utils.utils import send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"}, priority=5, block=True)", "msg.append(i) for i in msg: if i.type == 'text': if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else:", "nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment from nonebot.params import CommandArg from utils.message_builder import fake_forward_msg", "nonebot import on_command from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment from nonebot.params import CommandArg", "GroupMessageEvent, args: Message = CommandArg()): at = [] msg = [] fake_msg =", "async def _(event: GroupMessageEvent, args: Message = CommandArg()): at = [] msg =", "i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at and fake_msg: group_forward_msg = await fake_forward_msg(at, event.group_id,", "msg = [] fake_msg = [] for i in args: if i.type ==", "from utils.utils import send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async def", "Message, MessageSegment from nonebot.params import CommandArg from utils.message_builder import fake_forward_msg from utils.utils import", 
"send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async def _(event: GroupMessageEvent, args:", "i in msg: if i.type == 'text': if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if", "if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at and fake_msg: group_forward_msg = await fake_forward_msg(at,", "[] fake_msg = [] for i in args: if i.type == 'at': if", "from nonebot.params import CommandArg from utils.message_builder import fake_forward_msg from utils.utils import send_group_forward_msg fake", "'at': if i.data[\"qq\"] == 'all': continue else: at.append(i.data[\"qq\"]) else: msg.append(i) for i in", "= [] fake_msg = [] for i in args: if i.type == 'at':", "if at and fake_msg: group_forward_msg = await fake_forward_msg(at, event.group_id, fake_msg) await send_group_forward_msg(event.group_id, group_forward_msg)", "else: at.append(i.data[\"qq\"]) else: msg.append(i) for i in msg: if i.type == 'text': if", "= on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async def _(event: GroupMessageEvent, args: Message =", "fake_msg.append(i) if at and fake_msg: group_forward_msg = await fake_forward_msg(at, event.group_id, fake_msg) await send_group_forward_msg(event.group_id,", "import send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async def _(event: GroupMessageEvent,", "in msg: if i.type == 'text': if i.data[\"text\"].strip(): fake_msg.append(MessageSegment.text(i.data[\"text\"].strip())) else: fake_msg.append(i) if at", "fake_forward_msg from utils.utils import send_group_forward_msg fake = on_command('fake', aliases={\"假消息\"}, priority=5, block=True) @fake.handle() async", "at and fake_msg: group_forward_msg = await fake_forward_msg(at, event.group_id, fake_msg) await 
send_group_forward_msg(event.group_id, group_forward_msg) else:", "<filename>plugins/fake_msg/__init__.py from nonebot import on_command from nonebot.adapters.onebot.v11 import GroupMessageEvent, Message, MessageSegment from nonebot.params" ]
[ "dependencies = [ ('features', '0016_auto_20190605_1830'), ] operations = [ migrations.AlterField( model_name='feature', name='apps', field=models.ManyToManyField(related_name='app_features',", "by Django 2.1.5 on 2019-09-11 07:50 from django.db import migrations, models class Migration(migrations.Migration):", "on 2019-09-11 07:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('features', '0016_auto_20190605_1830'), ] operations =", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('features', '0016_auto_20190605_1830'), ]", "migrations, models class Migration(migrations.Migration): dependencies = [ ('features', '0016_auto_20190605_1830'), ] operations = [", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('features', '0016_auto_20190605_1830'), ] operations", "[ ('features', '0016_auto_20190605_1830'), ] operations = [ migrations.AlterField( model_name='feature', name='apps', field=models.ManyToManyField(related_name='app_features', to='applications.Application', verbose_name='关联应用'),", "# Generated by Django 2.1.5 on 2019-09-11 07:50 from django.db import migrations, models", "models class Migration(migrations.Migration): dependencies = [ ('features', '0016_auto_20190605_1830'), ] operations = [ migrations.AlterField(", "'0016_auto_20190605_1830'), ] operations = [ migrations.AlterField( model_name='feature', name='apps', field=models.ManyToManyField(related_name='app_features', to='applications.Application', verbose_name='关联应用'), ), ]", "= [ ('features', '0016_auto_20190605_1830'), ] operations = [ migrations.AlterField( model_name='feature', name='apps', field=models.ManyToManyField(related_name='app_features', to='applications.Application',", "('features', '0016_auto_20190605_1830'), ] operations = [ migrations.AlterField( 
model_name='feature', name='apps', field=models.ManyToManyField(related_name='app_features', to='applications.Application', verbose_name='关联应用'), ),", "2.1.5 on 2019-09-11 07:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "Generated by Django 2.1.5 on 2019-09-11 07:50 from django.db import migrations, models class", "07:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('features', '0016_auto_20190605_1830'),", "class Migration(migrations.Migration): dependencies = [ ('features', '0016_auto_20190605_1830'), ] operations = [ migrations.AlterField( model_name='feature',", "Migration(migrations.Migration): dependencies = [ ('features', '0016_auto_20190605_1830'), ] operations = [ migrations.AlterField( model_name='feature', name='apps',", "2019-09-11 07:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('features',", "Django 2.1.5 on 2019-09-11 07:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies" ]
[ "blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\") is_like = models.BooleanField(default=True)", "timezone from django.utils.text import slugify from django.utils.translation import gettext_lazy as _ from modelcluster.fields", "import ClusterableModel from wagtail.core.fields import RichTextField from hextech_core.core.models.base_model import BaseModel, MetadataModel from hextech_core.core.utils", "if not self.pk: self.tag = self.tagger(self.tag) return super().save() class Blog(ClusterableModel, MetadataModel): id =", "on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, ) title = models.CharField(max_length=400) content = RichTextField() slug =", "slugify from django.utils.translation import gettext_lazy as _ from modelcluster.fields import ParentalKey from modelcluster.models", "self.tagger(self.tag) return super().save() class Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True", "import RichTextField from hextech_core.core.models.base_model import BaseModel, MetadataModel from hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id", "models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True ) def __str__(self): return", "= models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, ) name = models.CharField(max_length=100) slug =", "= (\"author\", \"title\") def save(self, *args, **kwargs): print(self.__dict__) if self.published and not self.published_at:", ") def __str__(self): return f\"#{self.blog.id} - {self.title if self.title else 'Untitled'}\" class BlogLike(BaseModel):", "from wagtail.core.fields import RichTextField from hextech_core.core.models.base_model import 
BaseModel, MetadataModel from hextech_core.core.utils import no_accent_vietnamese", "= models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True ) def __str__(self):", "not self.published_at: self.published_at = timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self): return", "BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, ) name = models.CharField(max_length=100)", "def save(self, *args, **kwargs): if not self.pk: self.tag = self.tagger(self.tag) return super().save() class", "related_name=\"blogs\", db_index=True, null=True, ) title = models.CharField(max_length=400) content = RichTextField() slug = models.SlugField(blank=True,", "Meta: unique_together = (\"author\", \"title\") def save(self, *args, **kwargs): print(self.__dict__) if self.published and", "import RandomID from hextech_core.users.models import User class BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\", on_delete=models.PROTECT,", "= models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\",", "= models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\") is_like = models.BooleanField(default=True) class Meta: unique_together = (\"blog\", \"user\")", "**kwargs) def __str__(self): return self.name class BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True) def __str__(self):", "def tagger(cls, tag: str) -> str: tag = no_accent_vietnamese(tag) tag = \"\".join([ele.title() for", "name = models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True, db_index=True) def save(self, *args, **kwargs): self.slug", "tags = models.ManyToManyField(BlogTag, 
blank=True) published = models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True) class Meta:", "= models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True) class Meta: unique_together = (\"author\", \"title\") def", "= models.CharField(max_length=400) content = RichTextField() slug = models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags =", "related_name=\"blogs\", db_index=True ) category = models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, ) title", ") author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category = models.ForeignKey( BlogCategory,", "from hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id import RandomID from hextech_core.users.models import User class", "models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True ) def __str__(self): return f\"#{self.blog.id} - {self.title", "MetadataModel from hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id import RandomID from hextech_core.users.models import User", "BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, ) title = models.CharField(max_length=400) content = RichTextField() slug", "= RichTextField() slug = models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags = models.ManyToManyField(BlogTag, blank=True) published", "_ from modelcluster.fields import ParentalKey from modelcluster.models import ClusterableModel from wagtail.core.fields import RichTextField", "models.CharField(max_length=50, unique=True) def __str__(self): return self.tag @classmethod def tagger(cls, tag: str) -> str:", "= models.DateTimeField(null=True, blank=True) class Meta: unique_together = (\"author\", \"title\") def save(self, *args, **kwargs):", 
"models.DateTimeField(null=True, blank=True) class Meta: unique_together = (\"author\", \"title\") def save(self, *args, **kwargs): print(self.__dict__)", "BaseModel, MetadataModel from hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id import RandomID from hextech_core.users.models import", "self.published_at: self.published_at = timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self): return self.title", "import gettext_lazy as _ from modelcluster.fields import ParentalKey from modelcluster.models import ClusterableModel from", "published_at = models.DateTimeField(null=True, blank=True) class Meta: unique_together = (\"author\", \"title\") def save(self, *args,", "hextech_core.core.models.base_model import BaseModel, MetadataModel from hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id import RandomID from", "wagtail.core.fields import RichTextField from hextech_core.core.models.base_model import BaseModel, MetadataModel from hextech_core.core.utils import no_accent_vietnamese from", "tag = no_accent_vietnamese(tag) tag = \"\".join([ele.title() for ele in tag.split(\" \")]) return tag", "\"title\") def save(self, *args, **kwargs): print(self.__dict__) if self.published and not self.published_at: self.published_at =", "django.utils.translation import gettext_lazy as _ from modelcluster.fields import ParentalKey from modelcluster.models import ClusterableModel", "import timezone from django.utils.text import slugify from django.utils.translation import gettext_lazy as _ from", "gettext_lazy as _ from modelcluster.fields import ParentalKey from modelcluster.models import ClusterableModel from wagtail.core.fields", "class BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\") is_like", "str: tag = 
no_accent_vietnamese(tag) tag = \"\".join([ele.title() for ele in tag.split(\" \")]) return", "MetadataModel): id = models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author = models.ForeignKey( \"users.User\",", "= models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags = models.ManyToManyField(BlogTag, blank=True) published = models.BooleanField(default=True) published_at", "= models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, ) title = models.CharField(max_length=400) content =", "models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True) class Meta: unique_together = (\"author\", \"title\") def save(self,", "return tag def save(self, *args, **kwargs): if not self.pk: self.tag = self.tagger(self.tag) return", "category = models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, ) title = models.CharField(max_length=400) content", "import User class BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, )", "self.name class BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True) def __str__(self): return self.tag @classmethod def", "content = RichTextField() slug = models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags = models.ManyToManyField(BlogTag, blank=True)", "self.published_at = timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self): return self.title class", "null=True, blank=True ) def __str__(self): return f\"#{self.blog.id} - {self.title if self.title else 'Untitled'}\"", "self.title else 'Untitled'}\" class BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User,", 
"'Untitled'}\" class BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\")", "def __str__(self): return self.title class BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content =", "= models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\") is_like = models.BooleanField(default=True) class", "= slugify(self.name) super().save(*args, **kwargs) def __str__(self): return self.name class BlogTag(BaseModel): tag = models.CharField(max_length=50,", "db_index=True) def save(self, *args, **kwargs): self.slug = slugify(self.name) super().save(*args, **kwargs) def __str__(self): return", "save(self, *args, **kwargs): if not self.pk: self.tag = self.tagger(self.tag) return super().save() class Blog(ClusterableModel,", "__str__(self): return self.tag @classmethod def tagger(cls, tag: str) -> str: tag = no_accent_vietnamese(tag)", "= models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True, db_index=True) def save(self, *args, **kwargs): self.slug =", "self.published and not self.published_at: self.published_at = timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def", "*args, **kwargs): self.slug = slugify(self.name) super().save(*args, **kwargs) def __str__(self): return self.name class BlogTag(BaseModel):", "no_accent_vietnamese from hextech_core.core.utils.id import RandomID from hextech_core.users.models import User class BlogCategory(MetadataModel): parent =", "models from django.utils import timezone from django.utils.text import slugify from django.utils.translation import gettext_lazy", "__str__(self): return self.title class BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE, 
related_name=\"comments\") content = RichTextField()", "RichTextField from hextech_core.core.models.base_model import BaseModel, MetadataModel from hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id import", "self.tag = self.tagger(self.tag) return super().save() class Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField( _(\"Random id\"),", "= models.ManyToManyField(BlogTag, blank=True) published = models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True) class Meta: unique_together", "hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id import RandomID from hextech_core.users.models import User class BlogCategory(MetadataModel):", "id = models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE,", "ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content = RichTextField() title = models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\",", "models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True", "if self.title else 'Untitled'}\" class BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user =", "return super().save() class Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True )", "timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self): return self.title class BlogComment(BaseModel): blog", "models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, ) title = models.CharField(max_length=400) content = RichTextField()", 
"__str__(self): return self.name class BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True) def __str__(self): return self.tag", "class BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content = RichTextField() title = models.CharField(max_length=255)", "tag: str) -> str: tag = no_accent_vietnamese(tag) tag = \"\".join([ele.title() for ele in", "Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author = models.ForeignKey(", "no_accent_vietnamese(tag) tag = \"\".join([ele.title() for ele in tag.split(\" \")]) return tag def save(self,", "def save(self, *args, **kwargs): print(self.__dict__) if self.published and not self.published_at: self.published_at = timezone.now()", "id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category", "from django.utils.translation import gettext_lazy as _ from modelcluster.fields import ParentalKey from modelcluster.models import", "models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category = models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True,", "models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags = models.ManyToManyField(BlogTag, blank=True) published = models.BooleanField(default=True) published_at =", "django.utils.text import slugify from django.utils.translation import gettext_lazy as _ from modelcluster.fields import ParentalKey", "slugify(self.name) super().save(*args, **kwargs) def __str__(self): return self.name class BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True)", "return self.name class BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True) def __str__(self): return self.tag @classmethod", 
"from hextech_core.users.models import User class BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True,", "from modelcluster.fields import ParentalKey from modelcluster.models import ClusterableModel from wagtail.core.fields import RichTextField from", "class BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, ) name =", "tag = \"\".join([ele.title() for ele in tag.split(\" \")]) return tag def save(self, *args,", "= models.CharField(max_length=50, unique=True) def __str__(self): return self.tag @classmethod def tagger(cls, tag: str) ->", "blank=True) class Meta: unique_together = (\"author\", \"title\") def save(self, *args, **kwargs): print(self.__dict__) if", "def save(self, *args, **kwargs): self.slug = slugify(self.name) super().save(*args, **kwargs) def __str__(self): return self.name", "class Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author =", "modelcluster.models import ClusterableModel from wagtail.core.fields import RichTextField from hextech_core.core.models.base_model import BaseModel, MetadataModel from", "blank=True, ) name = models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True, db_index=True) def save(self, *args,", "from django.utils.text import slugify from django.utils.translation import gettext_lazy as _ from modelcluster.fields import", "modelcluster.fields import ParentalKey from modelcluster.models import ClusterableModel from wagtail.core.fields import RichTextField from hextech_core.core.models.base_model", "slug = models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags = models.ManyToManyField(BlogTag, blank=True) published = models.BooleanField(default=True)", "{self.title if self.title else 'Untitled'}\" class 
BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user", "as _ from modelcluster.fields import ParentalKey from modelcluster.models import ClusterableModel from wagtail.core.fields import", "on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\") is_like = models.BooleanField(default=True) class Meta: unique_together", "import slugify from django.utils.translation import gettext_lazy as _ from modelcluster.fields import ParentalKey from", "super().save(*args, **kwargs) def __str__(self): return self.name class BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True) def", "User class BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, ) name", "**kwargs): self.slug = slugify(self.name) super().save(*args, **kwargs) def __str__(self): return self.name class BlogTag(BaseModel): tag", "created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True ) def __str__(self): return f\"#{self.blog.id}", "import no_accent_vietnamese from hextech_core.core.utils.id import RandomID from hextech_core.users.models import User class BlogCategory(MetadataModel): parent", "related_name=\"child_categories\", null=True, blank=True, ) name = models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True, db_index=True) def", "ClusterableModel from wagtail.core.fields import RichTextField from hextech_core.core.models.base_model import BaseModel, MetadataModel from hextech_core.core.utils import", "*args, **kwargs): if not self.pk: self.tag = self.tagger(self.tag) return super().save() class Blog(ClusterableModel, MetadataModel):", "author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category = models.ForeignKey( 
BlogCategory, on_delete=models.SET_NULL,", "null=True, ) title = models.CharField(max_length=400) content = RichTextField() slug = models.SlugField(blank=True, unique=True, db_index=True,", "= timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self): return self.title class BlogComment(BaseModel):", "django.utils import timezone from django.utils.text import slugify from django.utils.translation import gettext_lazy as _", "__str__(self): return f\"#{self.blog.id} - {self.title if self.title else 'Untitled'}\" class BlogLike(BaseModel): blog =", "class BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True) def __str__(self): return self.tag @classmethod def tagger(cls,", "= \"\".join([ele.title() for ele in tag.split(\" \")]) return tag def save(self, *args, **kwargs):", "RichTextField() slug = models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags = models.ManyToManyField(BlogTag, blank=True) published =", "= RichTextField() title = models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True", "title = models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True ) def", "from hextech_core.core.utils.id import RandomID from hextech_core.users.models import User class BlogCategory(MetadataModel): parent = models.ForeignKey(", "slug = models.SlugField(blank=True, unique=True, db_index=True) def save(self, *args, **kwargs): self.slug = slugify(self.name) super().save(*args,", "primary_key=True ) author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category = models.ForeignKey(", "models.CharField(max_length=400) content = RichTextField() slug = models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags = 
models.ManyToManyField(BlogTag,", "hextech_core.users.models import User class BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True,", "class Meta: unique_together = (\"author\", \"title\") def save(self, *args, **kwargs): print(self.__dict__) if self.published", "db_index=True, null=True, ) title = models.CharField(max_length=400) content = RichTextField() slug = models.SlugField(blank=True, unique=True,", "from django.db import models from django.utils import timezone from django.utils.text import slugify from", "tag def save(self, *args, **kwargs): if not self.pk: self.tag = self.tagger(self.tag) return super().save()", "db_index=True, max_length=450) tags = models.ManyToManyField(BlogTag, blank=True) published = models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True)", "= models.SlugField(blank=True, unique=True, db_index=True) def save(self, *args, **kwargs): self.slug = slugify(self.name) super().save(*args, **kwargs)", "return f\"#{self.blog.id} - {self.title if self.title else 'Untitled'}\" class BlogLike(BaseModel): blog = models.ForeignKey(Blog,", "self.tag @classmethod def tagger(cls, tag: str) -> str: tag = no_accent_vietnamese(tag) tag =", "= ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content = RichTextField() title = models.CharField(max_length=255) created_by = models.ForeignKey(", "related_name=\"comments\") content = RichTextField() title = models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\",", "models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\") is_like = models.BooleanField(default=True) class Meta:", "= no_accent_vietnamese(tag) tag = \"\".join([ele.title() for ele in tag.split(\" \")]) return tag def", 
"blank=True) published = models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True) class Meta: unique_together = (\"author\",", "BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\") is_like =", "super().save() class Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author", "\"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category = models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True,", "models.ManyToManyField(BlogTag, blank=True) published = models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True) class Meta: unique_together =", "*args, **kwargs): print(self.__dict__) if self.published and not self.published_at: self.published_at = timezone.now() self.slug =", "**kwargs): if not self.pk: self.tag = self.tagger(self.tag) return super().save() class Blog(ClusterableModel, MetadataModel): id", "f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self): return self.title class BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE,", "unique=True, db_index=True) def save(self, *args, **kwargs): self.slug = slugify(self.name) super().save(*args, **kwargs) def __str__(self):", "RichTextField() title = models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True )", "_(\"Random id\"), default=RandomID(\"blog.Blog\"), primary_key=True ) author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True )", "hextech_core.core.utils.id import RandomID from hextech_core.users.models import User class 
BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\",", "if self.published and not self.published_at: self.published_at = timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs)", "null=True, blank=True, ) name = models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True, db_index=True) def save(self,", "\"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True ) def __str__(self): return f\"#{self.blog.id} - {self.title if", "- {self.title if self.title else 'Untitled'}\" class BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\")", "max_length=450) tags = models.ManyToManyField(BlogTag, blank=True) published = models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True) class", "for ele in tag.split(\" \")]) return tag def save(self, *args, **kwargs): if not", ") category = models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, ) title = models.CharField(max_length=400)", "print(self.__dict__) if self.published and not self.published_at: self.published_at = timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args,", "models.SlugField(blank=True, unique=True, db_index=True) def save(self, *args, **kwargs): self.slug = slugify(self.name) super().save(*args, **kwargs) def", "return self.tag @classmethod def tagger(cls, tag: str) -> str: tag = no_accent_vietnamese(tag) tag", "tagger(cls, tag: str) -> str: tag = no_accent_vietnamese(tag) tag = \"\".join([ele.title() for ele", "@classmethod def tagger(cls, tag: str) -> str: tag = no_accent_vietnamese(tag) tag = \"\".join([ele.title()", "(\"author\", \"title\") def save(self, *args, **kwargs): print(self.__dict__) if self.published and not self.published_at: self.published_at", "django.db import models from django.utils import timezone from 
django.utils.text import slugify from django.utils.translation", "from django.utils import timezone from django.utils.text import slugify from django.utils.translation import gettext_lazy as", "and not self.published_at: self.published_at = timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self):", "return self.title class BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content = RichTextField() title", "blank=True ) def __str__(self): return f\"#{self.blog.id} - {self.title if self.title else 'Untitled'}\" class", "on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True ) def __str__(self): return f\"#{self.blog.id} - {self.title if self.title", "self.title class BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content = RichTextField() title =", "tag.split(\" \")]) return tag def save(self, *args, **kwargs): if not self.pk: self.tag =", "RandomID from hextech_core.users.models import User class BlogCategory(MetadataModel): parent = models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\",", "BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True) def __str__(self): return self.tag @classmethod def tagger(cls, tag:", "default=RandomID(\"blog.Blog\"), primary_key=True ) author = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category =", "import BaseModel, MetadataModel from hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id import RandomID from hextech_core.users.models", "**kwargs) def __str__(self): return self.title class BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content", "related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE, related_name=\"liked\") is_like = 
models.BooleanField(default=True) class Meta: unique_together =", "from hextech_core.core.models.base_model import BaseModel, MetadataModel from hextech_core.core.utils import no_accent_vietnamese from hextech_core.core.utils.id import RandomID", "unique=True) def __str__(self): return self.tag @classmethod def tagger(cls, tag: str) -> str: tag", "from modelcluster.models import ClusterableModel from wagtail.core.fields import RichTextField from hextech_core.core.models.base_model import BaseModel, MetadataModel", "in tag.split(\" \")]) return tag def save(self, *args, **kwargs): if not self.pk: self.tag", "import ParentalKey from modelcluster.models import ClusterableModel from wagtail.core.fields import RichTextField from hextech_core.core.models.base_model import", "= models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category = models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\",", "import models from django.utils import timezone from django.utils.text import slugify from django.utils.translation import", "= self.tagger(self.tag) return super().save() class Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField( _(\"Random id\"), default=RandomID(\"blog.Blog\"),", "else 'Untitled'}\" class BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE, related_name=\"likes\") user = models.ForeignKey(User, on_delete=models.CASCADE,", "parent = models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, ) name = models.CharField(max_length=100) slug", "models.ForeignKey( \"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, ) name = models.CharField(max_length=100) slug = models.SlugField(blank=True,", "not self.pk: self.tag = self.tagger(self.tag) return super().save() class Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField(", "= models.ForeignKey( 
\"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True, blank=True ) def __str__(self): return f\"#{self.blog.id} -", "blog = ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content = RichTextField() title = models.CharField(max_length=255) created_by =", "ParentalKey from modelcluster.models import ClusterableModel from wagtail.core.fields import RichTextField from hextech_core.core.models.base_model import BaseModel,", "\"self\", on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, ) name = models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True,", "ele in tag.split(\" \")]) return tag def save(self, *args, **kwargs): if not self.pk:", "title = models.CharField(max_length=400) content = RichTextField() slug = models.SlugField(blank=True, unique=True, db_index=True, max_length=450) tags", "def __str__(self): return self.name class BlogTag(BaseModel): tag = models.CharField(max_length=50, unique=True) def __str__(self): return", "on_delete=models.CASCADE, related_name=\"comments\") content = RichTextField() title = models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE,", "def __str__(self): return self.tag @classmethod def tagger(cls, tag: str) -> str: tag =", "models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True, db_index=True) def save(self, *args, **kwargs): self.slug = slugify(self.name)", "published = models.BooleanField(default=True) published_at = models.DateTimeField(null=True, blank=True) class Meta: unique_together = (\"author\", \"title\")", "**kwargs): print(self.__dict__) if self.published and not self.published_at: self.published_at = timezone.now() self.slug = f\"{slugify(self.title)}-{self.author.id}\"", "super().save(*args, **kwargs) def __str__(self): return self.title class BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE, 
related_name=\"comments\")", "f\"#{self.blog.id} - {self.title if self.title else 'Untitled'}\" class BlogLike(BaseModel): blog = models.ForeignKey(Blog, on_delete=models.CASCADE,", "db_index=True ) category = models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, ) title =", "self.slug = slugify(self.name) super().save(*args, **kwargs) def __str__(self): return self.name class BlogTag(BaseModel): tag =", "save(self, *args, **kwargs): self.slug = slugify(self.name) super().save(*args, **kwargs) def __str__(self): return self.name class", "related_name=\"+\", null=True, blank=True ) def __str__(self): return f\"#{self.blog.id} - {self.title if self.title else", "-> str: tag = no_accent_vietnamese(tag) tag = \"\".join([ele.title() for ele in tag.split(\" \")])", "BlogComment(BaseModel): blog = ParentalKey(Blog, on_delete=models.CASCADE, related_name=\"comments\") content = RichTextField() title = models.CharField(max_length=255) created_by", "self.slug = f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self): return self.title class BlogComment(BaseModel): blog =", "tag = models.CharField(max_length=50, unique=True) def __str__(self): return self.tag @classmethod def tagger(cls, tag: str)", "save(self, *args, **kwargs): print(self.__dict__) if self.published and not self.published_at: self.published_at = timezone.now() self.slug", "on_delete=models.CASCADE, related_name=\"blogs\", db_index=True ) category = models.ForeignKey( BlogCategory, on_delete=models.SET_NULL, related_name=\"blogs\", db_index=True, null=True, )", ") title = models.CharField(max_length=400) content = RichTextField() slug = models.SlugField(blank=True, unique=True, db_index=True, max_length=450)", "self.pk: self.tag = self.tagger(self.tag) return super().save() class Blog(ClusterableModel, MetadataModel): id = models.BigIntegerField( _(\"Random", "user = models.ForeignKey(User, on_delete=models.CASCADE, 
related_name=\"liked\") is_like = models.BooleanField(default=True) class Meta: unique_together = (\"blog\",", "\"\".join([ele.title() for ele in tag.split(\" \")]) return tag def save(self, *args, **kwargs): if", "unique=True, db_index=True, max_length=450) tags = models.ManyToManyField(BlogTag, blank=True) published = models.BooleanField(default=True) published_at = models.DateTimeField(null=True,", "= f\"{slugify(self.title)}-{self.author.id}\" super().save(*args, **kwargs) def __str__(self): return self.title class BlogComment(BaseModel): blog = ParentalKey(Blog,", "\")]) return tag def save(self, *args, **kwargs): if not self.pk: self.tag = self.tagger(self.tag)", ") name = models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True, db_index=True) def save(self, *args, **kwargs):", "on_delete=models.PROTECT, related_name=\"child_categories\", null=True, blank=True, ) name = models.CharField(max_length=100) slug = models.SlugField(blank=True, unique=True, db_index=True)", "str) -> str: tag = no_accent_vietnamese(tag) tag = \"\".join([ele.title() for ele in tag.split(\"", "def __str__(self): return f\"#{self.blog.id} - {self.title if self.title else 'Untitled'}\" class BlogLike(BaseModel): blog", "unique_together = (\"author\", \"title\") def save(self, *args, **kwargs): print(self.__dict__) if self.published and not", "content = RichTextField() title = models.CharField(max_length=255) created_by = models.ForeignKey( \"users.User\", on_delete=models.CASCADE, related_name=\"+\", null=True," ]
[ "only for the positive data points (label==1). ''' if self.epoch >= self.warmup_epochs: logits_start", "= max(unl_ind.sum(), 1) # # # Extract the logits for cross entropy loss", "# # self.model = CIFAR10_LeNet() # # def run_train(self, # train_data, # test_data,", "x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv + x_train_data # # for step", "# test_data, # lamda=0.5, # radius=8, # gamma=2, # verbose=False, # learning_rate=1e-3, #", "logits_start[target == 1] # # AdvLoss # if not half: # adv_loss =", "/ norm_h).view(-1, *[1] * (h.dim() - 1)) h = proj * h x_adv_sampled", "def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the adversarial loss: 1) Sample points initially at", "# scores = logits # label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # # Compute", "most optimal point in set N_i(r) classified as +ve (label=0). This is done", "the CE loss wrt label 0 # 3) Project the points between spheres", "import torch import torch.nn.functional as F import torch.utils.data import torch.utils.data from models.base_models import", "= logits_start[target == 1] # # AdvLoss # if not half: # adv_loss", "grad_normalized) # # if (step + 1) % 10 == 0: # #", "\"\"\"Computes the adversarial loss: 1) Sample points initially at random around the positive", "points between spheres of radius R and gamma * R # (set N_i(r))", "around the training data # We will perform SGD on these to find", "# if metric == 'alpha': # test_metric = (scores > 0.5).mean() # return", "if metric == 'alpha': # test_metric = (scores > 0.5).mean() # return test_metric", "data points # 2) Gradient ascent to find the most optimal point in", "# Parameters # ---------- # x_train_data: Batch of data to compute loss on.", "now on the surface of hyper-sphere # # if half: # adv_pred =", "entropy loss # logits_start = self.model.half_forward_start(data) # logits = 
self.model.half_forward_end(logits_start[lab_ind]) # # logits", "(label=0). This is done by maximizing # the CE loss wrt label 0", "batch[0], batch[2] data, target = data.to(device), target.to(device) # Data Processing data = data.to(torch.float)", "# adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind],", "average CE Loss ''' Adversarial Loss is calculated only for the positive data", "# self.radius = radius # self.gamma = gamma # # self.optimizer = optim.Adam(self.model.parameters(),", "# logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start) # # logits = torch.squeeze(logits,", "{}: {}'.format( # 'AUC', self.best_score # )) # # def test(self, test_loader, metric='AUC'):", "not self.half: adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half)", "# # Compute test score # labels, scores = zip(*label_score) # labels =", "roc_auc_score(labels, scores) # if metric == 'alpha': # test_metric = (scores > 0.5).mean()", "scores.cpu().data.numpy().tolist())) # # Compute test score # labels, scores = zip(*label_score) # labels", "torch.squeeze(target) # # self.optimizer.zero_grad() # # lab_ind = target == 1 # unl_ind", "= [] # batch_idx = -1 # for data, target, _ in test_loader:", "= torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class DROCC(nn.Module): #", "Extract the logits for cross entropy loss # logits_start = self.model.half_forward_start(data) # logits", "radius=8, # gamma=2, # verbose=False, # learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001,", "the normal points to the set N_i(r) h = x_adv_sampled - x_train_data norm_h", "Sample points initially at random around the positive training # data points #", "CE loss 
wrt target class 0 Parameters ---------- x_train_data: Batch of data to", "R (set N_i(r)) 4) Pass the calculated adversarial points through the model, and", "code DROCC is borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self, model=Net, lam=0.5, radius=8,", "# AdvLoss # epoch_ce_loss = 0 # Cross entropy Loss # # batch_idx", "= (1 - targets).to(self.device) # new_targets = torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float) #", "# def __init__(self, ): # super().__init__() # # self.model = CIFAR10_LeNet() # #", "# )) # # def test(self, test_loader, metric='AUC'): # \"\"\"Evaluate the model on", "lab_ind = target == 1 unl_ind = target == 0 # lab_cnt =", "target.to(device) lab_ind = target == 1 unl_ind = target == 0 # lab_cnt", "self.optimizer.step() # # epoch_ce_loss = epoch_ce_loss / (batch_idx + 1) # Average CE", "target = target.to(torch.float) # target = torch.squeeze(target) # # logits = self.model(data) #", "torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv + x_train_data # # for step in range(self.ascent_num_steps):", "if half: # logits = self.model.half_forward_end(x_adv_sampled) # else: # logits = self.model(x_adv_sampled) #", "logits = torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm", "lab_cnt = max(lab_ind.sum(), 1) # unl_cnt = max(unl_ind.sum(), 1) # # # Extract", "data.to(device), target.to(device) lab_ind = target == 1 unl_ind = target == 0 #", "# grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) #", "is done by maximizing # the CE loss wrt label 0 # 3)", "= F.binary_cross_entropy_with_logits(logits, new_targets) grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim())))", "Make use of broadcast to 
project h # proj = (alpha / norm_h).view(-1,", "(alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) h = proj * h", "the CE loss wrt label 0 3) Project the points between spheres of", "= ce_loss + adv_loss * self.lamda # else: # # If only CE", "= test_score # best_model = copy.deepcopy(self.model) # # print('Epoch: {}, CE Loss: {},", "_ in test_loader: # batch_idx += 1 # data, target = data.to(device), target.to(device)", "= target == 0 # # # lab_cnt = max(lab_ind.sum(), 1) # unl_cnt", "+= 1 # data, target = data.to(device), target.to(device) # data = data.to(torch.float) #", "torch.squeeze(adv_pred, dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return adv_loss # #", "logits # label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # # Compute test score #", "# new_targets = (1 - targets).to(self.device) # new_targets = torch.squeeze(new_targets) # new_targets =", "wrt target class 0 Parameters ---------- x_train_data: Batch of data to compute loss", "# if verbose: # test_score = self.test(test_loader) # if test_score > self.best_score: #", "# self.optimizer.zero_grad() # # lab_ind = target == 1 # unl_ind = target", "= torch.squeeze(target) # Extract the logits for cross entropy loss logits_start = self.model.forward_start(data)", "target[unl_ind], half) # epoch_adv_loss += adv_loss # # loss = ce_loss + adv_loss", "= F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to the epoch variable for printing average CE", "new_targets = new_targets.to(torch.float) # # if half: # logits = self.model.half_forward_end(x_adv_sampled) # else:", "Extract the logits for cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind])", "# self.model = copy.deepcopy(best_model) # print('\\nBest test {}: {}'.format( # 'AUC', self.best_score #", "# epoch_ce_loss = epoch_ce_loss / (batch_idx + 1) # Average CE Loss #", "if metric == 'AUC': # 
test_metric = roc_auc_score(labels, scores) # if metric ==", "gamma_lr=1, # batch_size=128, # half=True): # # self.best_score = -np.inf # best_model =", "epoch_adv_loss += adv_loss # # loss = ce_loss + adv_loss * self.lamda #", "data, _, target in train_loader: # batch_idx += 1 # data, target =", "torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h, self.radius, self.gamma * self.radius).to(device) #", "(label==1). # ''' # if epoch >= only_ce_epochs: # logits_start = logits_start[target ==", "Average AdvLoss # # if verbose: # test_score = self.test(test_loader) # if test_score", "# Average AdvLoss # # if verbose: # test_score = self.test(test_loader) # if", "target class 0 # # Parameters # ---------- # x_train_data: Batch of data", "# train_data, # test_data, # lamda=0.5, # radius=8, # gamma=2, # verbose=False, #", "at random around the positive training # data points # 2) Gradient ascent", "data = data.to(torch.float) # target = target.to(torch.float) # target = torch.squeeze(target) # #", "epoch_adv_loss = epoch_adv_loss / (batch_idx + 1) # Average AdvLoss # # if", "__init__(self, ): # super().__init__() # # self.model = CIFAR10_LeNet() # # def run_train(self,", "half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1], half) # epoch_adv_loss", "the adversarial points # x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv + x_train_data", "if half: logits = self.model.forward_end(x_adv_sampled) else: logits = self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1)", "grad_norm.view(-1, *[1] * (grad.dim() - 1)) grad_normalized = grad / grad_norm with torch.no_grad():", "# 4) Pass the calculated adversarial points through the model, # and calculate", "test_score > self.best_score: # self.best_score = test_score # best_model = copy.deepcopy(self.model) # #", "class 0 # # Parameters # ---------- # x_train_data: Batch of data to", "# target = 
torch.squeeze(target) # # self.optimizer.zero_grad() # # lab_ind = target ==", "adversarial points through the model, and calculate the CE loss wrt target class", "= self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target) # Add to", "sample points around the training data # # We will perform SGD on", "ce_loss = F.binary_cross_entropy_with_logits(logits, target) # Add to the epoch variable for printing average", "= self.one_class_adv_loss(data[target == 1].detach(), target[target == 1], half) # else: # adv_loss =", "# torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # # def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class", "else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1], half) # epoch_adv_loss += adv_loss", "= max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(), 1) # Extract the logits for cross", "target.to(torch.float) # target = torch.squeeze(target) # # logits = self.model(data) # logits =", "to the epoch variable for printing average CE Loss ''' Adversarial Loss is", "CE Loss: {}, AdvLoss: {}, {}: {}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC',", "0 # # Parameters # ---------- # x_train_data: Batch of data to compute", "range(total_epochs): # # Make the weights trainable # self.model.train() # # # Placeholder", "# # logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # #", "= lam self.radius = radius self.gamma = gamma self.warmup_epochs = warmup_epochs self.ascent_step_size =", "= self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss + adv_loss * self.lam else: # If", "label 0 3) Project the points between spheres of radius R and gamma", "1) # Average CE Loss # epoch_adv_loss = epoch_adv_loss / (batch_idx + 1)", "# # new_targets = torch.zeros(batch_size, 1).to(device) # # 
new_targets = (1 - targets).to(self.device)", "'AUC', self.best_score # )) # # def test(self, test_loader, metric='AUC'): # \"\"\"Evaluate the", "grad_normalized = grad / grad_norm # with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size * grad_normalized) #", "between spheres of radius R and gamma * R # (set N_i(r)) #", "= torch.squeeze(logits, dim=1) # sigmoid_logits = torch.sigmoid(logits) # scores = logits # label_score", "not half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else: # adv_loss =", "metric: Metric used for evaluation (AUC / F1). # \"\"\" # self.model.eval() #", "done loss = ce_loss return loss def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the adversarial", "# lr_scheduler.step() # if verbose: # self.model = copy.deepcopy(best_model) # print('\\nBest test {}:", "# # Placeholder for the respective 2 loss values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device)", "h = proj * h # x_adv_sampled = x_train_data + h # These", "# AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(),", "on the surface of hyper-sphere # # if half: # adv_pred = self.model.half_forward_end(x_adv_sampled)", "= self.model.forward_end(x_adv_sampled) else: logits = self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits,", "# ''' # if epoch >= only_ce_epochs: # logits_start = logits_start[target == 1]", "# # loss = ce_loss + adv_loss * self.lamda # else: # #", "= torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1, *[1]", "unl_cnt > 1: # logits_start = logits_start[unl_ind] # # AdvLoss # if not", "grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step + 1) % 10 ==", "test 
dataset. # Parameters # ---------- # test_loader: Dataloader object for the test", "target.to(device) # data = data.to(torch.float) # target = target.to(torch.float) # target = torch.squeeze(target)", "# Make use of broadcast to project h proj = (alpha / norm_h).view(-1,", "= None # self.ascent_num_steps = ascent_num_steps # self.ascent_step_size = ascent_step_size # self.lamda =", "only for the positive data points (label==1). ''' if self.epoch >= self.warmup_epochs and", "set N_i(r) classified as +ve (label=0). This is done by maximizing the CE", "# # lab_ind = target == 1 # unl_ind = target == 0", "positive data points (label==1). ''' if self.epoch >= self.warmup_epochs: logits_start = logits_start[target ==", "the adversarial points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv + x_train_data for step", "adv_loss * self.lam else: # If only CE based training has to be", "* (h.dim() - 1)) # h = proj * h # x_adv_sampled =", "= torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True) # # for epoch in range(total_epochs): #", "= torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1, h.dim())))) # alpha = torch.clamp(norm_h, self.radius, #", "== 1].detach(), target[target == 1], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target", "self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss + adv_loss * self.lam", "self.model.forward_end(x_adv_sampled) else: logits = self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets)", "adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class DROCC(nn.Module): # def __init__(self, ):", "half: logits = self.model.forward_end(x_adv_sampled) else: logits = self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1) new_loss", "points # x_adv = 
torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv + x_train_data # #", "dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class PU_DROCC(nn.Module): # def __init__(self,", "epoch_ce_loss = 0 # Cross entropy Loss # # batch_idx = -1 #", "logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start) # # logits = torch.squeeze(logits, dim=1)", "project h proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) h", "epoch in range(total_epochs): # # Make the weights trainable # self.model.train() # #", "Loss is calculated only for the positive data points (label==1). # ''' #", "# loss = ce_loss + adv_loss * self.lamda # else: # # If", "1)) h = proj * h x_adv_sampled = x_train_data + h # These", "best_model = None # self.ascent_num_steps = ascent_num_steps # self.ascent_step_size = ascent_step_size # self.lamda", "torch.utils.data import torch.utils.data from models.base_models import OCModel, PUModelRandomBatch from models.classifiers import Net device", "shuffle=True) # # test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True) # # for", "OCModel, PUModelRandomBatch from models.classifiers import Net device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")", "to be done # loss = ce_loss # # # Backprop # loss.backward()", "# labels = np.array(labels) # scores = np.array(scores) # if metric == 'AUC':", "optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True) # #", "sigmoid_logits = torch.sigmoid(logits) # scores = logits # label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist()))", "to the set N_i(r) # h = x_adv_sampled - x_train_data # norm_h =", "= batch[0], batch[2] data, target = data.to(device), target.to(device) # Data Processing data =", 
"gamma=2, # verbose=False, # learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50,", "and gamma * R # (set N_i(r)) # 4) Pass the calculated adversarial", "Pass the calculated adversarial points through the model, and calculate the CE loss", "Randomly sample points around the training data # We will perform SGD on", "the logits for cross entropy loss # logits_start = self.model.half_forward_start(data) # logits =", "== 1] # # AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[target", "_ in train_loader: # batch_idx += 1 # data, target = data.to(device), target.to(device)", "= torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True) # # test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size,", "grad.dim()))) # grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) # grad_normalized =", "= torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target) # Add to the epoch variable", "# test_metric = (scores > 0.5).mean() # return test_metric # # def one_class_adv_loss(self,", "'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True):", "data, target = data.to(device), target.to(device) # Data Processing data = data.to(torch.float) target =", "x_adv_sampled = x_adv + x_train_data for step in range(self.ascent_num_steps): with torch.enable_grad(): new_targets =", "''' # Adversarial Loss is calculated only for the positive data points (label==1).", "x_adv_sampled = x_adv + x_train_data # # for step in range(self.ascent_num_steps): # with", "= radius self.gamma = gamma self.warmup_epochs = warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps =", "torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if half: logits = self.model.forward_end(x_adv_sampled) else: logits = 
self.model(x_adv_sampled)", "points through the model, # and calculate the CE loss wrt target class", "# # # Placeholder for the respective 2 loss values # epoch_adv_loss =", "only CE based training has to be done loss = ce_loss return loss", "dim=1) # sigmoid_logits = torch.sigmoid(logits) # scores = logits # label_score += list(zip(target.cpu().data.numpy().tolist(),", "# # def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net,", "# gamma=2, # verbose=False, # learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6, #", "# ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512, # half=True): # # self.best_score", "# Randomly sample points around the training data # # We will perform", "half=True): # \"\"\"Computes the adversarial loss: # 1) Sample points initially at random", "= torch.squeeze(target) # # self.optimizer.zero_grad() # # lab_ind = target == 1 #", "def run_train(self, # train_data, # test_data, # lamda=0.5, # radius=8, # gamma=2, #", "+ve (label=0). 
This is done by maximizing # the CE loss wrt label", "gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam = lam self.radius = radius", "self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss + adv_loss * self.lam else: # If only", "printing average CE Loss ''' Adversarial Loss is calculated only for the positive", "adv_pred = self.model(x_adv_sampled) # # adv_pred = torch.squeeze(adv_pred, dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred,", "N_i(r) h = x_adv_sampled - x_train_data norm_h = torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim()))))", "self.model(x_adv_sampled) # # logits = torch.squeeze(logits, dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) #", "optimal point in set N_i(r) # classified as +ve (label=0). This is done", "self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to the", "to find the adversarial points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv + x_train_data", "# gamma_lr=1, # batch_size=128, # half=True): # # self.best_score = -np.inf # best_model", "x_train_data # norm_h = torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1, h.dim())))) # alpha =", "self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50,", "for cross entropy loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start[lab_ind]) #", "proj * h x_adv_sampled = x_train_data + h # These adv_points are now", "gamma=2, # verbose=False, # learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10,", "# epoch_adv_loss += 
adv_loss # # loss = ce_loss + adv_loss * self.lamda", "torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class DROCC(nn.Module): # def", "\"\"\"Evaluate the model on the given test dataset. # Parameters # ---------- #", "= torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to the epoch variable", "= x_adv + x_train_data # # for step in range(self.ascent_num_steps): # with torch.enable_grad():", "torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add to the epoch", "grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1,", "entropy Loss # # batch_idx = -1 # for data, _, target in", "adv_points are now on the surface of hyper-sphere # # if half: #", "1 # data, target = data.to(device), target.to(device) # data = data.to(torch.float) # target", "# target = torch.squeeze(target) # # logits = self.model(data) # logits = torch.squeeze(logits,", "self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class", "+ x_train_data # # for step in range(self.ascent_num_steps): # with torch.enable_grad(): # #", "+ve (label=0). 
This is done by maximizing the CE loss wrt label 0", "1) # Average AdvLoss # # if verbose: # test_score = self.test(test_loader) #", "1 unl_ind = target == 0 # lab_cnt = max(lab_ind.sum(), 1) unl_cnt =", "proj * h # x_adv_sampled = x_train_data + h # These adv_points are", "new_targets.to(torch.float) # # if half: # logits = self.model.half_forward_end(x_adv_sampled) # else: # logits", "1: logits_start = logits_start[unl_ind] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half)", "logits for cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits =", "(label==1). ''' if self.epoch >= self.warmup_epochs and unl_cnt > 1: logits_start = logits_start[unl_ind]", "= grad / grad_norm # with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # #", "0 # lab_cnt = max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(), 1) # Extract the", "torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class PU_DROCC(nn.Module): # def", "positive data points (label==1). 
# ''' # if epoch >= only_ce_epochs and unl_cnt", "\"\"\"Computes the adversarial loss: # 1) Sample points initially at random around the", "# class DROCC(nn.Module): # def __init__(self, ): # super().__init__() # # self.model =", "* grad_normalized) # # if (step + 1) % 10 == 0: #", "of radius R and gamma * R # (set N_i(r)) # 4) Pass", "logits_start = logits_start[unl_ind] # # AdvLoss # if not half: # adv_loss =", "test_score)) # lr_scheduler.step() # if verbose: # self.model = copy.deepcopy(best_model) # print('\\nBest test", "wrt label 0 # 3) Project the points between spheres of radius R", "warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam = lam self.radius = radius self.gamma", "epoch >= only_ce_epochs and unl_cnt > 1: # logits_start = logits_start[unl_ind] # #", "'AUC', test_score)) # lr_scheduler.step() # if verbose: # self.model = copy.deepcopy(best_model) # print('\\nBest", "torch.squeeze(logits, dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) # # grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0]", "adv_points are now on the surface of hyper-sphere if half: adv_pred = self.model.forward_end(x_adv_sampled)", "'AUC': # test_metric = roc_auc_score(labels, scores) # if metric == 'alpha': # test_metric", "= torch.sigmoid(logits) # scores = logits # label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) #", "will perform SGD on these to find the adversarial points # x_adv =", "1)) # h = proj * h # x_adv_sampled = x_train_data + h", "= logits_start[unl_ind] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss", "batch_loss(self, batch): data, target = batch[0], batch[2] data, target = data.to(device), target.to(device) #", "= target.to(torch.float) target = torch.squeeze(target) # Extract the logits for cross entropy loss", "grad / grad_norm # with torch.no_grad(): # 
x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # # if", "# classified as +ve (label=0). This is done by maximizing # the CE", "ascent_num_steps=50, # gamma_lr=1, # batch_size=128, # half=True): # # self.best_score = -np.inf #", "to be done loss = ce_loss return loss def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes", "self.best_score = -np.inf # best_model = None # self.ascent_num_steps = ascent_num_steps # self.ascent_step_size", "logits = torch.squeeze(logits, dim=1) # sigmoid_logits = torch.sigmoid(logits) # scores = logits #", "optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader = torch.utils.data.DataLoader(train_data, #", "Processing # data = data.to(torch.float) # target = target.to(torch.float) # target = torch.squeeze(target)", "data # We will perform SGD on these to find the adversarial points", "self.lamda = lamda # self.radius = radius # self.gamma = gamma # #", "torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) # grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1))", "around the positive training # data points # 2) Gradient ascent to find", "Project the points between spheres of radius R and gamma * R #", "with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # # if (step + 1) %", "= grad_norm.view(-1, *[1] * (grad.dim() - 1)) # grad_normalized = grad / grad_norm", "Parameters ---------- x_train_data: Batch of data to compute loss on. 
\"\"\" batch_size =", "new_targets = torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if half: logits = self.model.forward_end(x_adv_sampled) else: logits", "list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # # Compute test score # labels, scores = zip(*label_score)", "F.binary_cross_entropy_with_logits(logits, target) # Add to the epoch variable for printing average CE Loss", "target.to(torch.float) target = torch.squeeze(target) # Extract the logits for cross entropy loss logits_start", "ce_loss return loss def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the adversarial loss: 1) Sample", "= torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") # code DROCC is borrowed from https://github.com/microsoft/EdgeML", "set N_i(r) # classified as +ve (label=0). This is done by maximizing #", "half=True): # # self.best_score = -np.inf # best_model = None # self.ascent_num_steps =", "test_loader: # batch_idx += 1 # data, target = data.to(device), target.to(device) # data", "= data.to(torch.float) # target = target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad()", "- 1)) h = proj * h x_adv_sampled = x_train_data + h #", "grad / grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step + 1) %", "Make use of broadcast to project h proj = (alpha / norm_h).view(-1, *[1]", "(step + 1) % 10 == 0: # Project the normal points to", "# self.optimizer.step() # # epoch_ce_loss = epoch_ce_loss / (batch_idx + 1) # Average", "Parameters # ---------- # x_train_data: Batch of data to compute loss on. 
#", "adv_loss # # loss = ce_loss + adv_loss * self.lamda # else: #", "= self.model.half_forward_end(x_adv_sampled) # else: # adv_pred = self.model(x_adv_sampled) # # adv_pred = torch.squeeze(adv_pred,", "== 'alpha': # test_metric = (scores > 0.5).mean() # return test_metric # #", "# best_model = None # self.ascent_num_steps = ascent_num_steps # self.ascent_step_size = ascent_step_size #", "F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add to the epoch variable for printing average CE", "copy.deepcopy(best_model) # print('\\nBest test {}: {}'.format( # 'AUC', self.best_score # )) # #", "# the CE loss wrt label 0 # 3) Project the points between", "# logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target) # # Add", "(scores > 0.5).mean() # return test_metric # # def one_class_adv_loss(self, x_train_data, targets, half=True):", "# x_adv_sampled = x_adv + x_train_data # # for step in range(self.ascent_num_steps): #", "# def save(self, path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # # def load(self, path):", "+ h # These adv_points are now on the surface of hyper-sphere if", "# logits_start = logits_start[target == 1] # # AdvLoss # if not half:", "1) # Extract the logits for cross entropy loss logits_start = self.model.forward_start(data) logits", "# with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # # if (step + 1)", "learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512,", "these to find the adversarial points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv +", "the epoch variable for printing average CE Loss ''' Adversarial Loss is calculated", "new_targets = new_targets.to(torch.float) if half: logits = self.model.forward_end(x_adv_sampled) else: logits = self.model(x_adv_sampled) logits", "# grad_norm = 
grad_norm.view(-1, *[1] * (grad.dim() - 1)) # grad_normalized = grad", "if epoch >= only_ce_epochs: # logits_start = logits_start[target == 1] # # AdvLoss", "= self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred,", "# # Extract the logits for cross entropy loss # logits_start = self.model.half_forward_start(data)", "self.model.half_forward_end(logits_start[lab_ind]) # # logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) #", "torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True) # # for epoch in range(total_epochs): # #", "if self.epoch >= self.warmup_epochs and unl_cnt > 1: logits_start = logits_start[unl_ind] # AdvLoss", "if epoch >= only_ce_epochs and unl_cnt > 1: # logits_start = logits_start[unl_ind] #", "AdvLoss: {}, {}: {}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score)) # lr_scheduler.step()", "compute loss on. 
\"\"\" batch_size = len(x_train_data) # Randomly sample points around the", "= torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm =", "# # self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # #", "data.to(torch.float) # target = target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad() #", "# 'AUC', test_score)) # lr_scheduler.step() # if verbose: # self.model = copy.deepcopy(best_model) #", "points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv + x_train_data for step in range(self.ascent_num_steps):", "batch_loss(self, batch): data, target = batch[0], batch[2] data, target = data.to(device), target.to(device) lab_ind", "self.one_class_adv_loss(logits_start.detach(), target[target == 1], half) # epoch_adv_loss += adv_loss # # loss =", "# # self.optimizer.zero_grad() # # # Extract the logits for cross entropy loss", "positive data points (label==1). # ''' # if epoch >= only_ce_epochs: # logits_start", "== 0: # Project the normal points to the set N_i(r) h =", "test {}: {}'.format( # 'AUC', self.best_score # )) # # def test(self, test_loader,", "self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) #", "for data, target, _ in train_loader: # batch_idx += 1 # data, target", "# x_train_data: Batch of data to compute loss on. 
# \"\"\" # batch_size", "loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start) # # logits =", "# # return adv_loss # # def save(self, path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt'))", "self.model(x_adv_sampled) # # adv_pred = torch.squeeze(adv_pred, dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) #", "from models.base_models import OCModel, PUModelRandomBatch from models.classifiers import Net device = torch.device(\"cuda\" if", "label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # # Compute test score # labels, scores", "target = data.to(device), target.to(device) lab_ind = target == 1 unl_ind = target ==", "ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512, # half=True): # # self.best_score = -np.inf #", "batch[2] data, target = data.to(device), target.to(device) # Data Processing data = data.to(torch.float) target", "# Add to the epoch variable for printing average CE Loss # epoch_ce_loss", "''' if self.epoch >= self.warmup_epochs: logits_start = logits_start[target == 1] # AdvLoss if", "calculated only for the positive data points (label==1). ''' if self.epoch >= self.warmup_epochs", "x_train_data, half=True): \"\"\"Computes the adversarial loss: 1) Sample points initially at random around", "self.gamma * self.radius).to(device) # # Make use of broadcast to project h #", "half: # adv_pred = self.model.half_forward_end(x_adv_sampled) # else: # adv_pred = self.model(x_adv_sampled) # #", "Loss is calculated only for the positive data points (label==1). 
''' if self.epoch", "> self.best_score: # self.best_score = test_score # best_model = copy.deepcopy(self.model) # # print('Epoch:", "target, _ in test_loader: # batch_idx += 1 # data, target = data.to(device),", "data.to(device), target.to(device) # Data Processing data = data.to(torch.float) target = target.to(torch.float) target =", "DROCC(nn.Module): # def __init__(self, ): # super().__init__() # # self.model = CIFAR10_LeNet() #", "ce_loss = F.binary_cross_entropy_with_logits(logits, target) # # Add to the epoch variable for printing", "# # self.optimizer.zero_grad() # # lab_ind = target == 1 # unl_ind =", "0.5).mean() # return test_metric # # def one_class_adv_loss(self, x_train_data, targets, half=True): # \"\"\"Computes", "# batch_size=batch_size, # shuffle=True) # # for epoch in range(total_epochs): # # Make", "to find the most optimal point in set N_i(r) # classified as +ve", "points initially at random around the positive training data points 2) Gradient ascent", "weights trainable # self.model.train() # # # Placeholder for the respective 2 loss", "epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss = 0 # Cross entropy Loss", "N_i(r) classified as +ve (label=0). This is done by maximizing the CE loss", "dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class DROCC(nn.Module): # def __init__(self,", "verbose=False, # learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96,", "logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target) # Add to the epoch", "# best_model = copy.deepcopy(self.model) # # print('Epoch: {}, CE Loss: {}, AdvLoss: {},", "logits_start = logits_start[unl_ind] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else:", "points (label==1). 
# ''' # if epoch >= only_ce_epochs and unl_cnt > 1:", "adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class PU_DROCC(nn.Module):", "% 10 == 0: # # Project the normal points to the set", "points to the set N_i(r) # h = x_adv_sampled - x_train_data # norm_h", "[x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1, *[1] * (grad.dim()", "0: # # Project the normal points to the set N_i(r) # h", "batch_size=512, # half=True): # # self.best_score = -np.inf # best_model = None #", "new_targets) # # grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm = torch.norm(grad, p=2, dim=tuple(range(1,", "classified as +ve (label=0). This is done by maximizing the CE loss wrt", "in set N_i(r) # classified as +ve (label=0). This is done by maximizing", "Loss # epoch_ce_loss += ce_loss # # ''' # Adversarial Loss is calculated", "# loss.backward() # self.optimizer.step() # # epoch_ce_loss = epoch_ce_loss / (batch_idx + 1)", "None # self.ascent_num_steps = ascent_num_steps # self.ascent_step_size = ascent_step_size # self.lamda = lamda", "torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") # code DROCC is borrowed from https://github.com/microsoft/EdgeML class", "else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss + adv_loss * self.lam else:", "# test_metric = roc_auc_score(labels, scores) # if metric == 'alpha': # test_metric =", "self.radius = radius # self.gamma = gamma # # self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate)", "1: # logits_start = logits_start[unl_ind] # # AdvLoss # if not half: #", "(label==1). 
# ''' # if epoch >= only_ce_epochs and unl_cnt > 1: #", "the calculated adversarial points through the model, and calculate the CE loss wrt", "else: # If only CE based training has to be done loss =", "max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(), 1) # Extract the logits for cross entropy", "{}, CE Loss: {}, AdvLoss: {}, {}: {}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), #", "''' # if epoch >= only_ce_epochs: # logits_start = logits_start[target == 1] #", "# data, target = data.to(device), target.to(device) # # Data Processing # data =", "target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad() # # lab_ind = target", "lamda=0.5, # radius=1, # gamma=2, # verbose=False, # learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2,", "adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss + adv_loss * self.lam else: #", "on the surface of hyper-sphere if half: adv_pred = self.model.forward_end(x_adv_sampled) else: adv_pred =", "{}, AdvLoss: {}, {}: {}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score)) #", "epoch variable for printing average CE Loss # epoch_ce_loss += ce_loss # #", "This is done by maximizing the CE loss wrt label 0 3) Project", "step in range(self.ascent_num_steps): # with torch.enable_grad(): # # new_targets = torch.zeros(batch_size, 1).to(device) #", "self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target) # Add to the", "return test_metric # # def one_class_adv_loss(self, x_train_data, targets, half=True): # \"\"\"Computes the adversarial", "# x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # # if (step + 1) % 10 ==", "the set N_i(r) # h = x_adv_sampled - x_train_data # norm_h = torch.sqrt(torch.sum(h", "self.gamma = gamma # # self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler = 
optim.lr_scheduler.ExponentialLR(self.optimizer,", "# ---------- # x_train_data: Batch of data to compute loss on. # \"\"\"", "as +ve (label=0). This is done by maximizing # the CE loss wrt", "lamda=0.5, # radius=8, # gamma=2, # verbose=False, # learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6,", "for epoch in range(total_epochs): # # Make the weights trainable # self.model.train() #", "epoch_ce_loss / (batch_idx + 1) # Average CE Loss # epoch_adv_loss = epoch_adv_loss", "# 'AUC', self.best_score # )) # # def test(self, test_loader, metric='AUC'): # \"\"\"Evaluate", "= torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target) # # Add to the", "# # batch_idx = -1 # for data, _, target in train_loader: #", "# new_targets = (1 - targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if", "for the positive data points (label==1). ''' if self.epoch >= self.warmup_epochs and unl_cnt", "# batch_idx = -1 # for data, _, target in train_loader: # batch_idx", "torch.utils.data from models.base_models import OCModel, PUModelRandomBatch from models.classifiers import Net device = torch.device(\"cuda\"", "self.lamda # else: # # If only CE based training has to be", "run_train(self, # train_data, # test_data, # lamda=0.5, # radius=8, # gamma=2, # verbose=False,", "/ (batch_idx + 1) # Average CE Loss # epoch_adv_loss = epoch_adv_loss /", "the respective 2 loss values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss", "self.lam else: # If only CE based training has to be done loss", "# else: # adv_pred = self.model(x_adv_sampled) # # adv_pred = torch.squeeze(adv_pred, dim=1) #", "(h.dim() - 1)) # h = proj * h # x_adv_sampled = x_train_data", "= optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader = torch.utils.data.DataLoader(train_data,", "= 
self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss + adv_loss * self.lam else: # If", "# ascent_num_steps=50, # gamma_lr=1, # batch_size=128, # half=True): # # self.best_score = -np.inf", "train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True) # # test_loader = torch.utils.data.DataLoader(test_data, #", "shuffle=True) # # for epoch in range(total_epochs): # # Make the weights trainable", "# label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # # Compute test score # labels,", "cross entropy loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start) # #", "== 1].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss + adv_loss", "return adv_loss # class DROCC(nn.Module): # def __init__(self, ): # super().__init__() # #", "the weights trainable # self.model.train() # # # Placeholder for the respective 2", "def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5, radius=8,", "= torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv + x_train_data for step in range(self.ascent_num_steps): with torch.enable_grad():", "# shuffle=True) # # test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True) # #", "label_score = [] # batch_idx = -1 # for data, target, _ in", "logits = self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add", "1 # data, target = data.to(device), target.to(device) # # Data Processing # data", "# Extract the logits for cross entropy loss # logits_start = self.model.half_forward_start(data) #", "batch): data, target = batch[0], batch[2] data, 
target = data.to(device), target.to(device) lab_ind =", "torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) # grad_norm = grad_norm.view(-1,", "the most optimal point in set N_i(r) classified as +ve (label=0). This is", "= target == 1 unl_ind = target == 0 # lab_cnt = max(lab_ind.sum(),", "= (scores > 0.5).mean() # return test_metric # # def one_class_adv_loss(self, x_train_data, targets,", "(grad.dim() - 1)) grad_normalized = grad / grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized)", "= (alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) h = proj *", "else: # adv_pred = self.model(x_adv_sampled) # # adv_pred = torch.squeeze(adv_pred, dim=1) # adv_loss", "# Make use of broadcast to project h # proj = (alpha /", "half: # adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target == 1], half) # else:", "path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # # def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt')))", "# data = data.to(torch.float) # target = target.to(torch.float) # target = torch.squeeze(target) #", "= torch.clamp(norm_h, self.radius, self.gamma * self.radius).to(device) # Make use of broadcast to project", "= torch.clamp(norm_h, self.radius, # self.gamma * self.radius).to(device) # # Make use of broadcast", "# epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score)) # lr_scheduler.step() # if verbose: #", "targets, half=True): # \"\"\"Computes the adversarial loss: # 1) Sample points initially at", "Add to the epoch variable for printing average CE Loss ''' Adversarial Loss", "+ h # These adv_points are now on the surface of hyper-sphere #", "to the epoch variable for printing average CE Loss # epoch_ce_loss += ce_loss", "adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # 
class DROCC(nn.Module):", "adv_loss # class PU_DROCC(nn.Module): # def __init__(self, ): # super().__init__() # # self.model", "loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1) ce_loss =", "# # Project the normal points to the set N_i(r) # h =", "h.dim())))) # alpha = torch.clamp(norm_h, self.radius, # self.gamma * self.radius).to(device) # # Make", "* self.radius).to(device) # # Make use of broadcast to project h # proj", "new_targets = (1 - targets).to(self.device) # new_targets = torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float)", "grad.dim()))) grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) grad_normalized = grad /", "the surface of hyper-sphere # # if half: # adv_pred = self.model.half_forward_end(x_adv_sampled) #", "broadcast to project h proj = (alpha / norm_h).view(-1, *[1] * (h.dim() -", "We will perform SGD on these to find the adversarial points x_adv =", "metric == 'alpha': # test_metric = (scores > 0.5).mean() # return test_metric #", "loss on. # \"\"\" # batch_size = len(x_train_data) # # Randomly sample points", "# super().__init__() # # self.model = CIFAR10_LeNet() # # def run_train(self, # train_data,", "== 0 # lab_cnt = max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(), 1) # Extract", "= torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add to the", "= target == 1 # unl_ind = target == 0 # # #", "logits = self.model.half_forward_end(x_adv_sampled) # else: # logits = self.model(x_adv_sampled) # # logits =", "the given test dataset. 
# Parameters # ---------- # test_loader: Dataloader object for", "# proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) # h", "= torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1))", "self.warmup_epochs and unl_cnt > 1: logits_start = logits_start[unl_ind] # AdvLoss if not self.half:", "# Data Processing data = data.to(torch.float) target = target.to(torch.float) target = torch.squeeze(target) #", "+ 1) # Average CE Loss # epoch_adv_loss = epoch_adv_loss / (batch_idx +", "data to compute loss on. \"\"\" batch_size = len(x_train_data) # Randomly sample points", "self.one_class_adv_loss(data[target == 1].detach(), target[target == 1], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(),", "torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target) # Add to the epoch variable for", "= ce_loss # # # Backprop # loss.backward() # self.optimizer.step() # # epoch_ce_loss", "= data.to(device), target.to(device) # Data Processing data = data.to(torch.float) target = target.to(torch.float) target", "# gamma_lr=0.96, # batch_size=512, # half=True): # # self.best_score = -np.inf # best_model", "# # Backprop # loss.backward() # self.optimizer.step() # # epoch_ce_loss = epoch_ce_loss /", "variable for printing average CE Loss ''' Adversarial Loss is calculated only for", "total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512, # half=True):", "# h = proj * h # x_adv_sampled = x_train_data + h #", "classified as +ve (label=0). 
This is done by maximizing # the CE loss", "project h # proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1))", "CE Loss # epoch_adv_loss = epoch_adv_loss / (batch_idx + 1) # Average AdvLoss", "# Cross entropy Loss # # batch_idx = -1 # for data, target,", "points # 2) Gradient ascent to find the most optimal point in set", "# # for epoch in range(total_epochs): # # Make the weights trainable #", "batch[0], batch[2] data, target = data.to(device), target.to(device) lab_ind = target == 1 unl_ind", "= len(x_train_data) # # Randomly sample points around the training data # #", "[x_adv_sampled])[0] # grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) # grad_norm = grad_norm.view(-1, *[1]", "# batch_idx += 1 # data, target = data.to(device), target.to(device) # data =", "* grad_normalized) if (step + 1) % 10 == 0: # Project the", "train_data, # test_data, # lamda=0.5, # radius=8, # gamma=2, # verbose=False, # learning_rate=1e-3,", "AdvLoss # # if verbose: # test_score = self.test(test_loader) # if test_score >", "zip(*label_score) # labels = np.array(labels) # scores = np.array(scores) # if metric ==", "Gradient ascent to find the most optimal point in set N_i(r) classified as", "batch_size = len(x_train_data) # # Randomly sample points around the training data #", "np.array(labels) # scores = np.array(scores) # if metric == 'AUC': # test_metric =", "- 1)) # h = proj * h # x_adv_sampled = x_train_data +", "normal points to the set N_i(r) # h = x_adv_sampled - x_train_data #", "around the positive training data points 2) Gradient ascent to find the most", "the CE loss wrt target class 0 Parameters ---------- x_train_data: Batch of data", "# logits = torch.squeeze(logits, dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) # # grad", "= lamda # self.radius = radius # self.gamma = gamma # # self.optimizer", "calculate the CE loss wrt target class 0 # # Parameters # ----------", "test_data, # lamda=0.5, # radius=8, # gamma=2, # 
verbose=False, # learning_rate=1e-3, # total_epochs=30,", "only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512, # half=True): # #", "= x_adv_sampled - x_train_data # norm_h = torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1, h.dim()))))", "# adv_pred = self.model.half_forward_end(x_adv_sampled) # else: # adv_pred = self.model(x_adv_sampled) # # adv_pred", "these to find the adversarial points # x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled =", "dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h, self.radius, self.gamma * self.radius).to(device) # Make use of", "AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target ==", "for the positive data points (label==1). # ''' # if epoch >= only_ce_epochs:", "and calculate the CE loss wrt target class 0 Parameters ---------- x_train_data: Batch", "torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target) # # Add to the epoch", "torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step + 1) % 10 == 0: #", "= gamma self.warmup_epochs = warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps = ascent_num_steps self.half =", "torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1, *[1] *", "and gamma * R (set N_i(r)) 4) Pass the calculated adversarial points through", "maximizing # the CE loss wrt label 0 # 3) Project the points", "# self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader", "= F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class PU_DROCC(nn.Module): # def __init__(self, ): #", "spheres of radius R and gamma * R (set N_i(r)) 4) Pass the", "+= adv_loss # # 
loss = ce_loss + adv_loss * self.lamda # else:", "= data.to(torch.float) # target = target.to(torch.float) # target = torch.squeeze(target) # # logits", "[] # batch_idx = -1 # for data, target, _ in test_loader: #", "point in set N_i(r) classified as +ve (label=0). This is done by maximizing", "target, _ in train_loader: # batch_idx += 1 # data, target = data.to(device),", "step in range(self.ascent_num_steps): with torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device) # new_targets = (1", "loss = ce_loss + adv_loss * self.lamda # else: # # If only", "model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam = lam", "These adv_points are now on the surface of hyper-sphere if half: adv_pred =", "1)) grad_normalized = grad / grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step", "= max(unl_ind.sum(), 1) # Extract the logits for cross entropy loss logits_start =", "2, dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h, self.radius, self.gamma * self.radius).to(device) # Make use", "target[unl_ind], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss +=", "h # These adv_points are now on the surface of hyper-sphere # #", "# lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size, #", "and unl_cnt > 1: # logits_start = logits_start[unl_ind] # # AdvLoss # if", "logits_start[unl_ind] # # AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind],", "target[lab_ind]) # Add to the epoch variable for printing average CE Loss '''", "= data.to(device), target.to(device) lab_ind = target == 1 unl_ind = target == 0", "= F.binary_cross_entropy_with_logits(logits, target) # Add to the epoch variable for printing 
average CE", "Project the normal points to the set N_i(r) h = x_adv_sampled - x_train_data", "logits = self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target) # Add", "adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss += adv_loss # # loss =", "- targets).to(self.device) # new_targets = torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float) # # if", ">= only_ce_epochs: # logits_start = logits_start[target == 1] # # AdvLoss # if", "torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device) # new_targets = (1 - targets).to(self.device) new_targets =", "# gamma=2, # verbose=False, # learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001, #", "metric='AUC'): # \"\"\"Evaluate the model on the given test dataset. # Parameters #", "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") # code DROCC is borrowed from", "loss.backward() # self.optimizer.step() # # epoch_ce_loss = epoch_ce_loss / (batch_idx + 1) #", "in set N_i(r) classified as +ve (label=0). 
This is done by maximizing the", "for cross entropy loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start) #", "# self.model.train() # # # Placeholder for the respective 2 loss values #", "# else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1], half) # epoch_adv_loss +=", "# if metric == 'AUC': # test_metric = roc_auc_score(labels, scores) # if metric", "Data Processing data = data.to(torch.float) target = target.to(torch.float) target = torch.squeeze(target) # Extract", "grad_normalized) if (step + 1) % 10 == 0: # Project the normal", "= epoch_adv_loss / (batch_idx + 1) # Average AdvLoss # # if verbose:", "self.model = CIFAR10_LeNet() # # def run_train(self, # train_data, # test_data, # lamda=0.5,", "Batch of data to compute loss on. # \"\"\" # batch_size = len(x_train_data)", "torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad,", "Placeholder for the respective 2 loss values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss", "labels = np.array(labels) # scores = np.array(scores) # if metric == 'AUC': #", "= torch.squeeze(adv_pred, dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return adv_loss #", "self.best_score = test_score # best_model = copy.deepcopy(self.model) # # print('Epoch: {}, CE Loss:", "points initially at random around the positive training # data points # 2)", "''' if self.epoch >= self.warmup_epochs and unl_cnt > 1: logits_start = logits_start[unl_ind] #", "loss: 1) Sample points initially at random around the positive training data points", "of hyper-sphere if half: adv_pred = self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled) adv_pred =", "batch_idx = -1 # for data, target, _ in train_loader: # batch_idx +=", "epoch >= only_ce_epochs: # 
logits_start = logits_start[target == 1] # # AdvLoss #", "for printing average CE Loss ''' Adversarial Loss is calculated only for the", "== 0: # # Project the normal points to the set N_i(r) #", "targets).to(self.device) # new_targets = torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float) # # if half:", "return loss def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the adversarial loss: 1) Sample points", "data # # We will perform SGD on these to find the adversarial", "# lab_cnt = max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(), 1) # Extract the logits", "== 1], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1], half)", "> 1: logits_start = logits_start[unl_ind] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(),", "(batch_idx + 1) # Average CE Loss # epoch_adv_loss = epoch_adv_loss / (batch_idx", "not half: # adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target == 1], half) #", "{}'.format( # 'AUC', self.best_score # )) # # def test(self, test_loader, metric='AUC'): #", "entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1) ce_loss", "# lamda=0.5, # radius=8, # gamma=2, # verbose=False, # learning_rate=1e-3, # total_epochs=30, #", "/ norm_h).view(-1, *[1] * (h.dim() - 1)) # h = proj * h", "grad_norm.view(-1, *[1] * (grad.dim() - 1)) # grad_normalized = grad / grad_norm #", "target[lab_ind]) # # Add to the epoch variable for printing average CE Loss", "# epoch_ce_loss = 0 # Cross entropy Loss # # batch_idx = -1", "= logits_start[target == 1] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[target ==", "train_data, # test_data, # lamda=0.5, # radius=1, # gamma=2, # verbose=False, # learning_rate=5e-4,", "max(unl_ind.sum(), 1) # Extract the logits for cross entropy loss logits_start = 
self.model.forward_start(data)", "of data to compute loss on. \"\"\" batch_size = len(x_train_data) # Randomly sample", "# Project the normal points to the set N_i(r) # h = x_adv_sampled", "data = data.to(torch.float) target = target.to(torch.float) target = torch.squeeze(target) # Extract the logits", "return adv_loss # class PU_DROCC(nn.Module): # def __init__(self, ): # super().__init__() # #", "of hyper-sphere # # if half: # adv_pred = self.model.half_forward_end(x_adv_sampled) # else: #", "CE based training has to be done # loss = ce_loss # #", "# adv_pred = self.model(x_adv_sampled) # # adv_pred = torch.squeeze(adv_pred, dim=1) # adv_loss =", "norm_h = torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1, h.dim())))) # alpha = torch.clamp(norm_h, self.radius,", "data points (label==1). ''' if self.epoch >= self.warmup_epochs and unl_cnt > 1: logits_start", "# x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv + x_train_data # # for", "= target.to(torch.float) # target = torch.squeeze(target) # # logits = self.model(data) # logits", "class DROCC(nn.Module): # def __init__(self, ): # super().__init__() # # self.model = CIFAR10_LeNet()", "unl_cnt = max(unl_ind.sum(), 1) # Extract the logits for cross entropy loss logits_start", "Cross entropy Loss # # batch_idx = -1 # for data, target, _", "\"\"\" # batch_size = len(x_train_data) # # Randomly sample points around the training", "ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to the epoch variable for printing average", "with torch.enable_grad(): # # new_targets = torch.zeros(batch_size, 1).to(device) # # new_targets = (1", "best_model = copy.deepcopy(self.model) # # print('Epoch: {}, CE Loss: {}, AdvLoss: {}, {}:", "# self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001,", "positive 
training data points 2) Gradient ascent to find the most optimal point", "* (grad.dim() - 1)) grad_normalized = grad / grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size *", "F.binary_cross_entropy_with_logits(logits, new_targets) grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm", "+ 1) % 10 == 0: # Project the normal points to the", "# def run_train(self, # train_data, # test_data, # lamda=0.5, # radius=1, # gamma=2,", "data.to(torch.float) # target = target.to(torch.float) # target = torch.squeeze(target) # # logits =", "grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) # grad_norm", "metric == 'AUC': # test_metric = roc_auc_score(labels, scores) # if metric == 'alpha':", "spheres of radius R and gamma * R # (set N_i(r)) # 4)", "target == 1 unl_ind = target == 0 # lab_cnt = max(lab_ind.sum(), 1)", "points (label==1). ''' if self.epoch >= self.warmup_epochs and unl_cnt > 1: logits_start =", "self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss + adv_loss * self.lam", "entropy Loss # # batch_idx = -1 # for data, target, _ in", "proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) h = proj", "self.epoch >= self.warmup_epochs: logits_start = logits_start[target == 1] # AdvLoss if not self.half:", "based training has to be done loss = ce_loss return loss def one_class_adv_loss(self,", "# new_targets = torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float) # # if half: #", "radius=1, # gamma=2, # verbose=False, # learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6,", "---------- # x_train_data: Batch of data to compute loss on. 
# \"\"\" #", "# for data, _, target in train_loader: # batch_idx += 1 # data,", "# # Randomly sample points around the training data # # We will", "# radius=1, # gamma=2, # verbose=False, # learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2, #", "(set N_i(r)) # 4) Pass the calculated adversarial points through the model, #", "# for step in range(self.ascent_num_steps): # with torch.enable_grad(): # # new_targets = torch.zeros(batch_size,", "def batch_loss(self, batch): data, target = batch[0], batch[2] data, target = data.to(device), target.to(device)", "# radius=8, # gamma=2, # verbose=False, # learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6, #", "new_targets = torch.zeros(batch_size, 1).to(device) # # new_targets = (1 - targets).to(self.device) # new_targets", "run_train(self, # train_data, # test_data, # lamda=0.5, # radius=1, # gamma=2, # verbose=False,", "Backprop # loss.backward() # self.optimizer.step() # # epoch_ce_loss = epoch_ce_loss / (batch_idx +", "for the respective 2 loss values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss #", "loss: # 1) Sample points initially at random around the positive training #", "self.ascent_step_size = ascent_step_size self.ascent_num_steps = ascent_num_steps self.half = half def batch_loss(self, batch): data,", "batch_size=128, # half=True): # # self.best_score = -np.inf # best_model = None #", "batch_size=batch_size, # shuffle=True) # # for epoch in range(total_epochs): # # Make the", "# These adv_points are now on the surface of hyper-sphere # # if", "These adv_points are now on the surface of hyper-sphere # # if half:", "# If only CE based training has to be done loss = ce_loss", "torch.zeros(batch_size, 1).to(device) # new_targets = (1 - targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets =", "# norm_h = torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1, h.dim())))) # alpha = torch.clamp(norm_h,", "class 0 Parameters ---------- x_train_data: 
Batch of data to compute loss on. \"\"\"", "loss values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss = 0 #", "+ x_train_data for step in range(self.ascent_num_steps): with torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device) #", "x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # # if (step + 1) % 10 == 0:", "target.to(device) # # Data Processing # data = data.to(torch.float) # target = target.to(torch.float)", "Adversarial Loss is calculated only for the positive data points (label==1). ''' if", "target = torch.squeeze(target) # # self.optimizer.zero_grad() # # lab_ind = target == 1", "# ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512, # half=True): # # self.best_score = -np.inf", "self.model = copy.deepcopy(best_model) # print('\\nBest test {}: {}'.format( # 'AUC', self.best_score # ))", "target = torch.squeeze(target) # # logits = self.model(data) # logits = torch.squeeze(logits, dim=1)", "for the positive data points (label==1). ''' if self.epoch >= self.warmup_epochs: logits_start =", "Batch of data to compute loss on. \"\"\" batch_size = len(x_train_data) # Randomly", "# ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add to the epoch variable for", "= target == 0 # lab_cnt = max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(), 1)", "only_ce_epochs and unl_cnt > 1: # logits_start = logits_start[unl_ind] # # AdvLoss #", "# # test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True) # # for epoch", "test_loader, metric='AUC'): # \"\"\"Evaluate the model on the given test dataset. 
# Parameters", "test score # labels, scores = zip(*label_score) # labels = np.array(labels) # scores", "self.gamma * self.radius).to(device) # Make use of broadcast to project h proj =", "= torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class PU_DROCC(nn.Module): #", "F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class DROCC(nn.Module): # def __init__(self, ): # super().__init__()", "dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return adv_loss # # def", "# test_loader: Dataloader object for the test dataset. # metric: Metric used for", "# self.ascent_step_size = ascent_step_size # self.lamda = lamda # self.radius = radius #", "unl_cnt > 1: logits_start = logits_start[unl_ind] # AdvLoss if not self.half: adv_loss =", "''' Adversarial Loss is calculated only for the positive data points (label==1). '''", "range(self.ascent_num_steps): with torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device) # new_targets = (1 - targets).to(self.device)", "# # if (step + 1) % 10 == 0: # # Project", "learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1, # batch_size=128,", "calculated only for the positive data points (label==1). 
''' if self.epoch >= self.warmup_epochs:", "# dim=tuple(range(1, h.dim())))) # alpha = torch.clamp(norm_h, self.radius, # self.gamma * self.radius).to(device) #", "# # lab_cnt = max(lab_ind.sum(), 1) # unl_cnt = max(unl_ind.sum(), 1) # #", "self.warmup_epochs: logits_start = logits_start[target == 1] # AdvLoss if not self.half: adv_loss =", "self.model.half_forward_end(x_adv_sampled) # else: # logits = self.model(x_adv_sampled) # # logits = torch.squeeze(logits, dim=1)", "self.half: adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss", "1).to(device) # new_targets = (1 - targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets = new_targets.to(torch.float)", "F import torch.utils.data import torch.utils.data from models.base_models import OCModel, PUModelRandomBatch from models.classifiers import", "test_metric = roc_auc_score(labels, scores) # if metric == 'alpha': # test_metric = (scores", "loss def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the adversarial loss: 1) Sample points initially", "1] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half) else:", "alpha = torch.clamp(norm_h, self.radius, self.gamma * self.radius).to(device) # Make use of broadcast to", "torch.sigmoid(logits) # scores = logits # label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # #", "to project h proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1))", "def __init__(self, ): # super().__init__() # # self.model = CIFAR10_LeNet() # # def", "through the model, # and calculate the CE loss wrt target class 0", "# def test(self, test_loader, metric='AUC'): # \"\"\"Evaluate the model on the given test", "0: # Project the normal points to the set N_i(r) h = x_adv_sampled", "if (step + 1) % 10 == 0: # # Project the normal", "PUModelRandomBatch from 
models.classifiers import Net device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") #", "h x_adv_sampled = x_train_data + h # These adv_points are now on the", "# print('Epoch: {}, CE Loss: {}, AdvLoss: {}, {}: {}'.format( # epoch, epoch_ce_loss.item(),", "F.binary_cross_entropy_with_logits(logits, target) # # Add to the epoch variable for printing average CE", "= batch[0], batch[2] data, target = data.to(device), target.to(device) lab_ind = target == 1", "cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1)", "perform SGD on these to find the adversarial points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled", "h # These adv_points are now on the surface of hyper-sphere if half:", "torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True) # # test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size, #", "done # loss = ce_loss # # # Backprop # loss.backward() # self.optimizer.step()", "# def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5,", "# adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return adv_loss # # def save(self,", "ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1, # batch_size=128, # half=True): # # self.best_score =", ")) # # def test(self, test_loader, metric='AUC'): # \"\"\"Evaluate the model on the", "and calculate the CE loss wrt target class 0 # # Parameters #", "dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add to the epoch variable", "# self.optimizer.zero_grad() # # # Extract the logits for cross entropy loss #", "one_class_adv_loss(self, x_train_data, targets, half=True): # \"\"\"Computes the adversarial loss: # 1) Sample points", "not self.half: 
adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss =", "new_targets = (1 - targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if half:", "# else: # # If only CE based training has to be done", "* R # (set N_i(r)) # 4) Pass the calculated adversarial points through", "train_loader: # batch_idx += 1 # data, target = data.to(device), target.to(device) # #", "if verbose: # self.model = copy.deepcopy(best_model) # print('\\nBest test {}: {}'.format( # 'AUC',", "* h # x_adv_sampled = x_train_data + h # These adv_points are now", "# Cross entropy Loss # # batch_idx = -1 # for data, _,", "10 == 0: # Project the normal points to the set N_i(r) h", "for evaluation (AUC / F1). # \"\"\" # self.model.eval() # label_score = []", "# for data, target, _ in train_loader: # batch_idx += 1 # data,", "# new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) # # grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm", "torch.clamp(norm_h, self.radius, self.gamma * self.radius).to(device) # Make use of broadcast to project h", "for cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits,", "== 1 unl_ind = target == 0 # lab_cnt = max(lab_ind.sum(), 1) unl_cnt", "half: # logits = self.model.half_forward_end(x_adv_sampled) # else: # logits = self.model(x_adv_sampled) # #", "training data # We will perform SGD on these to find the adversarial", "# adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss += adv_loss # # loss", "torch.clamp(norm_h, self.radius, # self.gamma * self.radius).to(device) # # Make use of broadcast to", "x_adv + x_train_data for step in range(self.ascent_num_steps): with torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device)", 
"# logits = self.model(data) # logits = torch.squeeze(logits, dim=1) # sigmoid_logits = torch.sigmoid(logits)", "ascent_num_steps self.half = half def batch_loss(self, batch): data, target = batch[0], batch[2] data,", "4) Pass the calculated adversarial points through the model, # and calculate the", "scores = zip(*label_score) # labels = np.array(labels) # scores = np.array(scores) # if", "adv_pred = self.model.half_forward_end(x_adv_sampled) # else: # adv_pred = self.model(x_adv_sampled) # # adv_pred =", "= max(lab_ind.sum(), 1) # unl_cnt = max(unl_ind.sum(), 1) # # # Extract the", "half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(),", "# # Add to the epoch variable for printing average CE Loss #", "= np.array(labels) # scores = np.array(scores) # if metric == 'AUC': # test_metric", "# Average CE Loss # epoch_adv_loss = epoch_adv_loss / (batch_idx + 1) #", "adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return adv_loss # # def save(self, path):", "F1). 
# \"\"\" # self.model.eval() # label_score = [] # batch_idx = -1", "# unl_ind = target == 0 # # # lab_cnt = max(lab_ind.sum(), 1)", "if test_score > self.best_score: # self.best_score = test_score # best_model = copy.deepcopy(self.model) #", "= ascent_step_size # self.lamda = lamda # self.radius = radius # self.gamma =", "adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half)", "= new_targets.to(torch.float) # # if half: # logits = self.model.half_forward_end(x_adv_sampled) # else: #", "1) # # # Extract the logits for cross entropy loss # logits_start", "p=2, dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) grad_normalized =", "R and gamma * R (set N_i(r)) 4) Pass the calculated adversarial points", "class DROCC(OCModel): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model,", "ascent_step_size self.ascent_num_steps = ascent_num_steps self.half = half def batch_loss(self, batch): data, target =", ">= only_ce_epochs and unl_cnt > 1: # logits_start = logits_start[unl_ind] # # AdvLoss", "epoch variable for printing average CE Loss ''' Adversarial Loss is calculated only", "norm_h).view(-1, *[1] * (h.dim() - 1)) # h = proj * h #", "target.to(device) # Data Processing data = data.to(torch.float) target = target.to(torch.float) target = torch.squeeze(target)", "data, target = batch[0], batch[2] data, target = data.to(device), target.to(device) # Data Processing", "+= ce_loss # # ''' # Adversarial Loss is calculated only for the", "x_train_data for step in range(self.ascent_num_steps): with torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device) # new_targets", "ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512, # half=True): # # self.best_score =", "lab_ind = target == 
1 # unl_ind = target == 0 # #", "# # # Backprop # loss.backward() # self.optimizer.step() # # epoch_ce_loss = epoch_ce_loss", "calculated adversarial points through the model, # and calculate the CE loss wrt", "target = data.to(device), target.to(device) # # Data Processing # data = data.to(torch.float) #", "scores = np.array(scores) # if metric == 'AUC': # test_metric = roc_auc_score(labels, scores)", "dim=tuple(range(1, h.dim())))) # alpha = torch.clamp(norm_h, self.radius, # self.gamma * self.radius).to(device) # #", "data points 2) Gradient ascent to find the most optimal point in set", "adversarial loss: # 1) Sample points initially at random around the positive training", "1 # unl_ind = target == 0 # # # lab_cnt = max(lab_ind.sum(),", "target in train_loader: # batch_idx += 1 # data, target = data.to(device), target.to(device)", "torch.squeeze(target) # # logits = self.model(data) # logits = torch.squeeze(logits, dim=1) # sigmoid_logits", "calculated only for the positive data points (label==1). 
# ''' # if epoch", "2 loss values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss = 0", "the logits for cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits", "= warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps = ascent_num_steps self.half = half def batch_loss(self,", "target = target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad() # # #", "# loss = ce_loss # # # Backprop # loss.backward() # self.optimizer.step() #", "adv_pred = self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss =", "ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam = lam self.radius = radius self.gamma =", "# data, target = data.to(device), target.to(device) # data = data.to(torch.float) # target =", "the calculated adversarial points through the model, # and calculate the CE loss", "# Data Processing # data = data.to(torch.float) # target = target.to(torch.float) # target", "new_targets) return adv_loss # class DROCC(nn.Module): # def __init__(self, ): # super().__init__() #", "(label==1). ''' if self.epoch >= self.warmup_epochs: logits_start = logits_start[target == 1] # AdvLoss", "training has to be done # loss = ce_loss # # # Backprop", "radius R and gamma * R (set N_i(r)) 4) Pass the calculated adversarial", "0 # # # lab_cnt = max(lab_ind.sum(), 1) # unl_cnt = max(unl_ind.sum(), 1)", "adv_pred = self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss", "in range(self.ascent_num_steps): # with torch.enable_grad(): # # new_targets = torch.zeros(batch_size, 1).to(device) # #", "is calculated only for the positive data points (label==1). 
''' if self.epoch >=", "10 == 0: # # Project the normal points to the set N_i(r)", "used for evaluation (AUC / F1). # \"\"\" # self.model.eval() # label_score =", "'alpha': # test_metric = (scores > 0.5).mean() # return test_metric # # def", "Gradient ascent to find the most optimal point in set N_i(r) # classified", "test_score # best_model = copy.deepcopy(self.model) # # print('Epoch: {}, CE Loss: {}, AdvLoss:", "score # labels, scores = zip(*label_score) # labels = np.array(labels) # scores =", "/ grad_norm # with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # # if (step", "F.binary_cross_entropy_with_logits(logits, new_targets) # # grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm = torch.norm(grad, p=2,", "the training data # # We will perform SGD on these to find", "model on the given test dataset. # Parameters # ---------- # test_loader: Dataloader", "self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss", "# else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss += adv_loss #", "of data to compute loss on. 
# \"\"\" # batch_size = len(x_train_data) #", "= torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h, self.radius, self.gamma * self.radius).to(device)", "target[target == 1], half) # epoch_adv_loss += adv_loss # # loss = ce_loss", "grad_normalized = grad / grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step +", "# logits = self.model.half_forward_end(x_adv_sampled) # else: # logits = self.model(x_adv_sampled) # # logits", "# ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1, # batch_size=128, # half=True): # # self.best_score", "normal points to the set N_i(r) h = x_adv_sampled - x_train_data norm_h =", "https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True):", "# logits = torch.squeeze(logits, dim=1) # sigmoid_logits = torch.sigmoid(logits) # scores = logits", "# Make the weights trainable # self.model.train() # # # Placeholder for the", "loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start[lab_ind]) # # logits =", "# metric: Metric used for evaluation (AUC / F1). 
# \"\"\" # self.model.eval()", "# self.gamma = gamma # # self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler =", "{}: {}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score)) # lr_scheduler.step() # if", "# grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) # grad_norm = grad_norm.view(-1, *[1] *", "1)) # grad_normalized = grad / grad_norm # with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size *", "+= 1 # data, target = data.to(device), target.to(device) # # Data Processing #", "# # Data Processing # data = data.to(torch.float) # target = target.to(torch.float) #", "logits for cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits =", "target = data.to(device), target.to(device) # data = data.to(torch.float) # target = target.to(torch.float) #", "# grad_normalized = grad / grad_norm # with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size * grad_normalized)", "If only CE based training has to be done loss = ce_loss return", "# AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) #", "import OCModel, PUModelRandomBatch from models.classifiers import Net device = torch.device(\"cuda\" if torch.cuda.is_available() else", "else: logits = self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad", "= torch.squeeze(logits, dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) # # grad = torch.autograd.grad(new_loss,", "Loss: {}, AdvLoss: {}, {}: {}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score))", "# for data, target, _ in test_loader: # batch_idx += 1 # data,", "self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad = 
torch.autograd.grad(new_loss, [x_adv_sampled])[0]", "torch.cuda.is_available() else \"cpu\") # code DROCC is borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel): def", "_, target in train_loader: # batch_idx += 1 # data, target = data.to(device),", "# adv_pred = torch.squeeze(adv_pred, dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return", "entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1) ce_loss", "the set N_i(r) h = x_adv_sampled - x_train_data norm_h = torch.sqrt(torch.sum(h ** 2,", "target[target == 1], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1],", "# We will perform SGD on these to find the adversarial points #", "norm_h = torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h, self.radius, self.gamma *", "4) Pass the calculated adversarial points through the model, and calculate the CE", "self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss + adv_loss *", "labels, scores = zip(*label_score) # labels = np.array(labels) # scores = np.array(scores) #", "half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss += adv_loss", "target = torch.squeeze(target) # # self.optimizer.zero_grad() # # # Extract the logits for", "Loss ''' Adversarial Loss is calculated only for the positive data points (label==1).", "adversarial loss: 1) Sample points initially at random around the positive training data", "# verbose=False, # learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50, #", "2, # dim=tuple(range(1, h.dim())))) # alpha = torch.clamp(norm_h, self.radius, # self.gamma * self.radius).to(device)", "= epoch_ce_loss / (batch_idx + 
1) # Average CE Loss # epoch_adv_loss =", "= self.model(x_adv_sampled) # # logits = torch.squeeze(logits, dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits, new_targets)", "batch_idx = -1 # for data, _, target in train_loader: # batch_idx +=", "batch): data, target = batch[0], batch[2] data, target = data.to(device), target.to(device) # Data", "h # x_adv_sampled = x_train_data + h # These adv_points are now on", "= torch.squeeze(target) # # logits = self.model(data) # logits = torch.squeeze(logits, dim=1) #", "# epoch_adv_loss = epoch_adv_loss / (batch_idx + 1) # Average AdvLoss # #", "self.gamma = gamma self.warmup_epochs = warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps = ascent_num_steps self.half", "# adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target == 1], half) # else: #", "1) # unl_cnt = max(unl_ind.sum(), 1) # # # Extract the logits for", "PU_DROCC(nn.Module): # def __init__(self, ): # super().__init__() # # self.model = CIFAR10_LeNet() #", "target) # Add to the epoch variable for printing average CE Loss '''", "target class 0 Parameters ---------- x_train_data: Batch of data to compute loss on.", "torch.squeeze(target) # # self.optimizer.zero_grad() # # # Extract the logits for cross entropy", "== 1 # unl_ind = target == 0 # # # lab_cnt =", "training # data points # 2) Gradient ascent to find the most optimal", "# alpha = torch.clamp(norm_h, self.radius, # self.gamma * self.radius).to(device) # # Make use", "logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start[lab_ind]) # # logits = torch.squeeze(logits, dim=1)", "targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if half: logits = self.model.forward_end(x_adv_sampled) else:", "F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to the epoch variable for printing average CE Loss", "0) self.lam = lam self.radius = radius 
self.gamma = gamma self.warmup_epochs = warmup_epochs", "= x_train_data + h # These adv_points are now on the surface of", "only CE based training has to be done # loss = ce_loss #", "logits_start[target == 1] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[target == 1].detach(),", "data, target = batch[0], batch[2] data, target = data.to(device), target.to(device) lab_ind = target", "import Net device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") # code DROCC is", "CIFAR10_LeNet() # # def run_train(self, # train_data, # test_data, # lamda=0.5, # radius=8,", "ce_loss # # ''' # Adversarial Loss is calculated only for the positive", "# # def run_train(self, # train_data, # test_data, # lamda=0.5, # radius=1, #", "len(x_train_data) # # Randomly sample points around the training data # # We", "= zip(*label_score) # labels = np.array(labels) # scores = np.array(scores) # if metric", "as +ve (label=0). This is done by maximizing the CE loss wrt label", "self.radius, self.gamma * self.radius).to(device) # Make use of broadcast to project h proj", "# # if half: # adv_pred = self.model.half_forward_end(x_adv_sampled) # else: # adv_pred =", "around the training data # # We will perform SGD on these to", "values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss = 0 # Cross", "= F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add to the epoch variable for printing average", "/ grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step + 1) % 10", "torch import torch.nn.functional as F import torch.utils.data import torch.utils.data from models.base_models import OCModel,", "self.radius, # self.gamma * self.radius).to(device) # # Make use of broadcast to project", "= torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float) # # if half: # logits =", "self.model(data) # logits = torch.squeeze(logits, dim=1) # sigmoid_logits = 
torch.sigmoid(logits) # scores =", "def run_train(self, # train_data, # test_data, # lamda=0.5, # radius=1, # gamma=2, #", "# return adv_loss # # def save(self, path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) #", "target = target.to(torch.float) target = torch.squeeze(target) # Extract the logits for cross entropy", "# verbose=False, # learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10, #", "batch_idx = -1 # for data, target, _ in test_loader: # batch_idx +=", "data, target, _ in train_loader: # batch_idx += 1 # data, target =", "unl_ind = target == 0 # lab_cnt = max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(),", "# h = x_adv_sampled - x_train_data # norm_h = torch.sqrt(torch.sum(h ** 2, #", "target == 0 # # # lab_cnt = max(lab_ind.sum(), 1) # unl_cnt =", "CE loss wrt label 0 # 3) Project the points between spheres of", "# logits = self.model.half_forward_end(logits_start) # # logits = torch.squeeze(logits, dim=1) # ce_loss =", "Cross entropy Loss # # batch_idx = -1 # for data, _, target", "# 3) Project the points between spheres of radius R and gamma *", "batch[2] data, target = data.to(device), target.to(device) lab_ind = target == 1 unl_ind =", "adversarial points through the model, # and calculate the CE loss wrt target", "# sigmoid_logits = torch.sigmoid(logits) # scores = logits # label_score += list(zip(target.cpu().data.numpy().tolist(), #", "test_metric = (scores > 0.5).mean() # return test_metric # # def one_class_adv_loss(self, x_train_data,", "batch_idx += 1 # data, target = data.to(device), target.to(device) # data = data.to(torch.float)", "total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1, # batch_size=128, # half=True):", "adv_pred = torch.squeeze(adv_pred, dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return adv_loss", "set N_i(r) # h = x_adv_sampled - x_train_data # norm_h = 
torch.sqrt(torch.sum(h **", "# scores.cpu().data.numpy().tolist())) # # Compute test score # labels, scores = zip(*label_score) #", "self.radius = radius self.gamma = gamma self.warmup_epochs = warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps", "else: # # If only CE based training has to be done #", "p=2, dim=tuple(range(1, grad.dim()))) # grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) #", "super().__init__() # # self.model = CIFAR10_LeNet() # # def run_train(self, # train_data, #", "entropy loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start) # # logits", "Processing data = data.to(torch.float) target = target.to(torch.float) target = torch.squeeze(target) # Extract the", "def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam", "gamma=gamma_lr) # # train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True) # # test_loader", "SGD on these to find the adversarial points # x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() #", "# Randomly sample points around the training data # We will perform SGD", "# # epoch_ce_loss = epoch_ce_loss / (batch_idx + 1) # Average CE Loss", "# label_score = [] # batch_idx = -1 # for data, target, _", "# batch_size = len(x_train_data) # # Randomly sample points around the training data", "in range(self.ascent_num_steps): with torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device) # new_targets = (1 -", "== 'AUC': # test_metric = roc_auc_score(labels, scores) # if metric == 'alpha': #", "points between spheres of radius R and gamma * R (set N_i(r)) 4)", "if half: adv_pred = self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1)", "# lab_ind = target == 1 # unl_ind = target == 0 #", "1) 
Sample points initially at random around the positive training # data points", "logits for cross entropy loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start)", "- x_train_data # norm_h = torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1, h.dim())))) # alpha", "ce_loss + adv_loss * self.lamda # else: # # If only CE based", "grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1, *[1] * (grad.dim() -", "= torch.squeeze(target) # # self.optimizer.zero_grad() # # # Extract the logits for cross", "= np.array(scores) # if metric == 'AUC': # test_metric = roc_auc_score(labels, scores) #", "- 1)) # grad_normalized = grad / grad_norm # with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size", "verbose=False, # learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1,", "1) % 10 == 0: # Project the normal points to the set", "torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # # def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch):", "lam self.radius = radius self.gamma = gamma self.warmup_epochs = warmup_epochs self.ascent_step_size = ascent_step_size", "loss wrt label 0 # 3) Project the points between spheres of radius", "compute loss on. 
# \"\"\" # batch_size = len(x_train_data) # # Randomly sample", "# batch_size=512, # half=True): # # self.best_score = -np.inf # best_model = None", "warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps = ascent_num_steps self.half = half def batch_loss(self, batch):", "gamma self.warmup_epochs = warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps = ascent_num_steps self.half = half", "logits = self.model.forward_end(x_adv_sampled) else: logits = self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1) new_loss =", "the normal points to the set N_i(r) # h = x_adv_sampled - x_train_data", "# code DROCC is borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self, model=Net, lam=0.5,", "alpha = torch.clamp(norm_h, self.radius, # self.gamma * self.radius).to(device) # # Make use of", "else \"cpu\") # code DROCC is borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self,", "Metric used for evaluation (AUC / F1). 
# \"\"\" # self.model.eval() # label_score", "*[1] * (h.dim() - 1)) # h = proj * h # x_adv_sampled", "ascent_step_size # self.lamda = lamda # self.radius = radius # self.gamma = gamma", "new_targets = torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float) # # if half: # logits", "logits = self.model.half_forward_end(logits_start) # # logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits,", "half=True): super().__init__(model, 0) self.lam = lam self.radius = radius self.gamma = gamma self.warmup_epochs", "the logits for cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits", "Pass the calculated adversarial points through the model, # and calculate the CE", "= self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad = torch.autograd.grad(new_loss,", "to the set N_i(r) h = x_adv_sampled - x_train_data norm_h = torch.sqrt(torch.sum(h **", "1] # # AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[target ==", "/ F1). # \"\"\" # self.model.eval() # label_score = [] # batch_idx =", "N_i(r)) # 4) Pass the calculated adversarial points through the model, # and", "wrt label 0 3) Project the points between spheres of radius R and", "# lab_cnt = max(lab_ind.sum(), 1) # unl_cnt = max(unl_ind.sum(), 1) # # #", "data, target = data.to(device), target.to(device) # data = data.to(torch.float) # target = target.to(torch.float)", "data, target = data.to(device), target.to(device) # # Data Processing # data = data.to(torch.float)", "self.optimizer.zero_grad() # # # Extract the logits for cross entropy loss # logits_start", "new_targets.to(torch.float) if half: logits = self.model.forward_end(x_adv_sampled) else: logits = self.model(x_adv_sampled) logits = torch.squeeze(logits,", "Dataloader object for the test dataset. 
# metric: Metric used for evaluation (AUC", "based training has to be done # loss = ce_loss # # #", "# batch_size=batch_size, # shuffle=True) # # test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True)", "range(self.ascent_num_steps): # with torch.enable_grad(): # # new_targets = torch.zeros(batch_size, 1).to(device) # # new_targets", "target = batch[0], batch[2] data, target = data.to(device), target.to(device) lab_ind = target ==", "to compute loss on. \"\"\" batch_size = len(x_train_data) # Randomly sample points around", "the positive training # data points # 2) Gradient ascent to find the", "# # # lab_cnt = max(lab_ind.sum(), 1) # unl_cnt = max(unl_ind.sum(), 1) #", "/ (batch_idx + 1) # Average AdvLoss # # if verbose: # test_score", "self.warmup_epochs = warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps = ascent_num_steps self.half = half def", "adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss =", "# \"\"\" # self.model.eval() # label_score = [] # batch_idx = -1 #", "# return test_metric # # def one_class_adv_loss(self, x_train_data, targets, half=True): # \"\"\"Computes the", "the positive data points (label==1). # ''' # if epoch >= only_ce_epochs and", "the test dataset. # metric: Metric used for evaluation (AUC / F1). 
#", "in test_loader: # batch_idx += 1 # data, target = data.to(device), target.to(device) #", "* self.lam else: # If only CE based training has to be done", "h # proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) #", "test_score = self.test(test_loader) # if test_score > self.best_score: # self.best_score = test_score #", "): # super().__init__() # # self.model = CIFAR10_LeNet() # # def run_train(self, #", "super().__init__(model, 0) self.lam = lam self.radius = radius self.gamma = gamma self.warmup_epochs =", "gamma_lr=0.96, # batch_size=512, # half=True): # # self.best_score = -np.inf # best_model =", "load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2,", "= self.model(data) # logits = torch.squeeze(logits, dim=1) # sigmoid_logits = torch.sigmoid(logits) # scores", "CE loss wrt label 0 3) Project the points between spheres of radius", "= data.to(device), target.to(device) # # Data Processing # data = data.to(torch.float) # target", "# and calculate the CE loss wrt target class 0 # # Parameters", "ce_loss + adv_loss * self.lam else: # If only CE based training has", "target = torch.squeeze(target) # Extract the logits for cross entropy loss logits_start =", "the surface of hyper-sphere if half: adv_pred = self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled)", "target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad() # # # Extract the", "to compute loss on. 
# \"\"\" # batch_size = len(x_train_data) # # Randomly", "surface of hyper-sphere # # if half: # adv_pred = self.model.half_forward_end(x_adv_sampled) # else:", "points around the training data # We will perform SGD on these to", "hyper-sphere # # if half: # adv_pred = self.model.half_forward_end(x_adv_sampled) # else: # adv_pred", "= torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if half: logits = self.model.forward_end(x_adv_sampled) else: logits =", "perform SGD on these to find the adversarial points # x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_()", "data.to(device), target.to(device) # # Data Processing # data = data.to(torch.float) # target =", "torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) grad_normalized", "logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to the epoch", "= copy.deepcopy(self.model) # # print('Epoch: {}, CE Loss: {}, AdvLoss: {}, {}: {}'.format(", "> 1: # logits_start = logits_start[unl_ind] # # AdvLoss # if not half:", "half) # epoch_adv_loss += adv_loss # # loss = ce_loss + adv_loss *", "# # Make use of broadcast to project h # proj = (alpha", "- targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if half: logits = self.model.forward_end(x_adv_sampled)", ">= self.warmup_epochs and unl_cnt > 1: logits_start = logits_start[unl_ind] # AdvLoss if not", "PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0)", "# self.model.eval() # label_score = [] # batch_idx = -1 # for data,", "# total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1, # batch_size=128, #", "# batch_idx = -1 # for data, target, _ in train_loader: # batch_idx", 
"(new_targets)) # # return adv_loss # # def save(self, path): # torch.save(self.model.state_dict(), os.path.join(path,", "target = data.to(device), target.to(device) # Data Processing data = data.to(torch.float) target = target.to(torch.float)", "# def run_train(self, # train_data, # test_data, # lamda=0.5, # radius=8, # gamma=2,", "use of broadcast to project h proj = (alpha / norm_h).view(-1, *[1] *", "# \"\"\"Evaluate the model on the given test dataset. # Parameters # ----------", "only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1, # batch_size=128, # half=True): # #", "test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True) # # for epoch in range(total_epochs):", "---------- # test_loader: Dataloader object for the test dataset. # metric: Metric used", "# ---------- # test_loader: Dataloader object for the test dataset. # metric: Metric", "F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return adv_loss # # def save(self, path): # torch.save(self.model.state_dict(),", "AdvLoss # epoch_ce_loss = 0 # Cross entropy Loss # # batch_idx =", "x_adv + x_train_data # # for step in range(self.ascent_num_steps): # with torch.enable_grad(): #", "= logits # label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # # Compute test score", "= proj * h # x_adv_sampled = x_train_data + h # These adv_points", "= self.model.half_forward_end(logits_start[lab_ind]) # # logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind])", "# Add to the epoch variable for printing average CE Loss ''' Adversarial", "else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss + adv_loss * self.lam else:", "# if test_score > self.best_score: # self.best_score = test_score # best_model = copy.deepcopy(self.model)", "radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, 
ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam = lam self.radius =", "0 Parameters ---------- x_train_data: Batch of data to compute loss on. \"\"\" batch_size", "= F.binary_cross_entropy_with_logits(logits, target) # # Add to the epoch variable for printing average", "data.to(device), target.to(device) # data = data.to(torch.float) # target = target.to(torch.float) # target =", "loss = ce_loss # # # Backprop # loss.backward() # self.optimizer.step() # #", "* (grad.dim() - 1)) # grad_normalized = grad / grad_norm # with torch.no_grad():", "with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step + 1) % 10 == 0:", "models.classifiers import Net device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") # code DROCC", "average CE Loss # epoch_ce_loss += ce_loss # # ''' # Adversarial Loss", "self.test(test_loader) # if test_score > self.best_score: # self.best_score = test_score # best_model =", "# Project the normal points to the set N_i(r) h = x_adv_sampled -", "dim=tuple(range(1, grad.dim()))) # grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) # grad_normalized", "= optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True) #", "data points (label==1). # ''' # if epoch >= only_ce_epochs and unl_cnt >", "# if not half: # adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target == 1],", "# # Make the weights trainable # self.model.train() # # # Placeholder for", "loss = ce_loss return loss def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the adversarial loss:", "given test dataset. 
# Parameters # ---------- # test_loader: Dataloader object for the", "= -1 # for data, target, _ in train_loader: # batch_idx += 1", "= ascent_num_steps # self.ascent_step_size = ascent_step_size # self.lamda = lamda # self.radius =", "as F import torch.utils.data import torch.utils.data from models.base_models import OCModel, PUModelRandomBatch from models.classifiers", "# if epoch >= only_ce_epochs and unl_cnt > 1: # logits_start = logits_start[unl_ind]", "= (1 - targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if half: logits", "self.radius).to(device) # Make use of broadcast to project h proj = (alpha /", "training data points 2) Gradient ascent to find the most optimal point in", "new_targets) return adv_loss # class PU_DROCC(nn.Module): # def __init__(self, ): # super().__init__() #", "of radius R and gamma * R (set N_i(r)) 4) Pass the calculated", "# We will perform SGD on these to find the adversarial points x_adv", "# ''' # if epoch >= only_ce_epochs and unl_cnt > 1: # logits_start", "are now on the surface of hyper-sphere # # if half: # adv_pred", "logits_start = logits_start[target == 1] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[target", "print('Epoch: {}, CE Loss: {}, AdvLoss: {}, {}: {}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(),", "by maximizing the CE loss wrt label 0 3) Project the points between", "the training data # We will perform SGD on these to find the", "test_metric # # def one_class_adv_loss(self, x_train_data, targets, half=True): # \"\"\"Computes the adversarial loss:", "self.radius).to(device) # # Make use of broadcast to project h # proj =", "# train_data, # test_data, # lamda=0.5, # radius=1, # gamma=2, # verbose=False, #", "half def batch_loss(self, batch): data, target = batch[0], batch[2] data, target = data.to(device),", "** 2, dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h, self.radius, self.gamma * 
self.radius).to(device) # Make", "# \"\"\"Computes the adversarial loss: # 1) Sample points initially at random around", "self.ascent_num_steps = ascent_num_steps self.half = half def batch_loss(self, batch): data, target = batch[0],", "trainable # self.model.train() # # # Placeholder for the respective 2 loss values", "grad_norm # with torch.no_grad(): # x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # # if (step +", "is done by maximizing the CE loss wrt label 0 3) Project the", "# self.lamda = lamda # self.radius = radius # self.gamma = gamma #", "R # (set N_i(r)) # 4) Pass the calculated adversarial points through the", "\"\"\" batch_size = len(x_train_data) # Randomly sample points around the training data #", "Average CE Loss # epoch_adv_loss = epoch_adv_loss / (batch_idx + 1) # Average", "scores = logits # label_score += list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # # Compute test", "adv_loss * self.lamda # else: # # If only CE based training has", "sample points around the training data # We will perform SGD on these", "(step + 1) % 10 == 0: # # Project the normal points", "points 2) Gradient ascent to find the most optimal point in set N_i(r)", "for step in range(self.ascent_num_steps): # with torch.enable_grad(): # # new_targets = torch.zeros(batch_size, 1).to(device)", "torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1, h.dim())))) # alpha = torch.clamp(norm_h, self.radius, # self.gamma", "use of broadcast to project h # proj = (alpha / norm_h).view(-1, *[1]", "the points between spheres of radius R and gamma * R (set N_i(r))", "point in set N_i(r) # classified as +ve (label=0). 
This is done by", "__init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam =", "self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start[lab_ind]) # # logits = torch.squeeze(logits, dim=1) # ce_loss", "adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss +", "grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) # grad_normalized = grad /", "= torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss = 0 # Cross entropy Loss #", "= self.one_class_adv_loss(data[target == 1].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss", "1) Sample points initially at random around the positive training data points 2)", "# AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target", "# Extract the logits for cross entropy loss logits_start = self.model.forward_start(data) logits =", "# print('\\nBest test {}: {}'.format( # 'AUC', self.best_score # )) # # def", "(h.dim() - 1)) h = proj * h x_adv_sampled = x_train_data + h", "x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step + 1) % 10 == 0: # Project", "self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets)", "# self.ascent_num_steps = ascent_num_steps # self.ascent_step_size = ascent_step_size # self.lamda = lamda #", "and unl_cnt > 1: logits_start = logits_start[unl_ind] # AdvLoss if not self.half: adv_loss", "+ 1) # Average AdvLoss # # if verbose: # test_score = self.test(test_loader)", "# # self.best_score = -np.inf # best_model = None # self.ascent_num_steps = ascent_num_steps", "1) % 10 == 0: # # 
Project the normal points to the", "# Placeholder for the respective 2 loss values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) #", "torch.nn.functional as F import torch.utils.data import torch.utils.data from models.base_models import OCModel, PUModelRandomBatch from", "positive data points (label==1). ''' if self.epoch >= self.warmup_epochs and unl_cnt > 1:", "the epoch variable for printing average CE Loss # epoch_ce_loss += ce_loss #", "dataset. # metric: Metric used for evaluation (AUC / F1). # \"\"\" #", "* (h.dim() - 1)) h = proj * h x_adv_sampled = x_train_data +", "= self.model.half_forward_end(logits_start) # # logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target)", "epoch_adv_loss / (batch_idx + 1) # Average AdvLoss # # if verbose: #", "if not half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else: # adv_loss", "of broadcast to project h proj = (alpha / norm_h).view(-1, *[1] * (h.dim()", "# # Parameters # ---------- # x_train_data: Batch of data to compute loss", "= self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start) # # logits = torch.squeeze(logits, dim=1) #", "-1 # for data, _, target in train_loader: # batch_idx += 1 #", "AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else:", "new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2, dim=tuple(range(1,", ">= self.warmup_epochs: logits_start = logits_start[target == 1] # AdvLoss if not self.half: adv_loss", "grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) # grad_norm = grad_norm.view(-1, *[1] * (grad.dim()", "# target = target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad() # #", "x_adv_sampled = x_train_data + h # These adv_points are now on 
the surface", "self.one_class_adv_loss(data[target == 1].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss +", "batch_size=batch_size, # shuffle=True) # # test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True) #", "# # def one_class_adv_loss(self, x_train_data, targets, half=True): # \"\"\"Computes the adversarial loss: #", "# logits = self.model(x_adv_sampled) # # logits = torch.squeeze(logits, dim=1) # new_loss =", "has to be done loss = ce_loss return loss def one_class_adv_loss(self, x_train_data, half=True):", "grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) grad_normalized = grad / grad_norm", "epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score)) # lr_scheduler.step() # if verbose: # self.model", "now on the surface of hyper-sphere if half: adv_pred = self.model.forward_end(x_adv_sampled) else: adv_pred", "find the adversarial points # x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv +", "on the given test dataset. # Parameters # ---------- # test_loader: Dataloader object", "lr_scheduler.step() # if verbose: # self.model = copy.deepcopy(best_model) # print('\\nBest test {}: {}'.format(", "= self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target)", "the positive data points (label==1). 
''' if self.epoch >= self.warmup_epochs: logits_start = logits_start[target", "len(x_train_data) # Randomly sample points around the training data # We will perform", "ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam = lam self.radius = radius self.gamma = gamma", "to project h # proj = (alpha / norm_h).view(-1, *[1] * (h.dim() -", "# # logits = self.model(data) # logits = torch.squeeze(logits, dim=1) # sigmoid_logits =", "= x_adv_sampled - x_train_data norm_h = torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim())))) alpha =", "test dataset. # metric: Metric used for evaluation (AUC / F1). # \"\"\"", "= roc_auc_score(labels, scores) # if metric == 'alpha': # test_metric = (scores >", "dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target) # Add to the epoch variable for printing", "Randomly sample points around the training data # # We will perform SGD", "+= list(zip(target.cpu().data.numpy().tolist(), # scores.cpu().data.numpy().tolist())) # # Compute test score # labels, scores =", "initially at random around the positive training # data points # 2) Gradient", "models.base_models import OCModel, PUModelRandomBatch from models.classifiers import Net device = torch.device(\"cuda\" if torch.cuda.is_available()", "return adv_loss # # def save(self, path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # #", "# ce_loss = F.binary_cross_entropy_with_logits(logits, target) # # Add to the epoch variable for", "is borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6,", "training data # # We will perform SGD on these to find the", "== 1] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half)", "= ce_loss return loss def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the adversarial loss: 1)", "the adversarial loss: 1) Sample points initially at random around 
the positive training", "dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) # # grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] #", "for cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits = torch.squeeze(logits,", "points through the model, and calculate the CE loss wrt target class 0", "lab_cnt = max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(), 1) # Extract the logits for", "* h x_adv_sampled = x_train_data + h # These adv_points are now on", "Loss # # batch_idx = -1 # for data, target, _ in train_loader:", "torch.zeros(batch_size, 1).to(device) # # new_targets = (1 - targets).to(self.device) # new_targets = torch.squeeze(new_targets)", "# # new_targets = (1 - targets).to(self.device) # new_targets = torch.squeeze(new_targets) # new_targets", "= -1 # for data, target, _ in test_loader: # batch_idx += 1", "data, target = data.to(device), target.to(device) lab_ind = target == 1 unl_ind = target", "# scores = np.array(scores) # if metric == 'AUC': # test_metric = roc_auc_score(labels,", "+ 1) % 10 == 0: # # Project the normal points to", "# def one_class_adv_loss(self, x_train_data, targets, half=True): # \"\"\"Computes the adversarial loss: # 1)", "on these to find the adversarial points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv", "= gamma # # self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr)", "0 # Cross entropy Loss # # batch_idx = -1 # for data,", "np.array(scores) # if metric == 'AUC': # test_metric = roc_auc_score(labels, scores) # if", "Adversarial Loss is calculated only for the positive data points (label==1). 
# '''", "with torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device) # new_targets = (1 - targets).to(self.device) new_targets", "# logits_start = logits_start[unl_ind] # # AdvLoss # if not half: # adv_loss", "# x_adv_sampled = x_train_data + h # These adv_points are now on the", "from models.classifiers import Net device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") # code", "# learning_rate=5e-4, # total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96, #", "target == 1 # unl_ind = target == 0 # # # lab_cnt", "# if half: # logits = self.model.half_forward_end(x_adv_sampled) # else: # logits = self.model(x_adv_sampled)", "# only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1, # batch_size=128, # half=True): #", "in range(total_epochs): # # Make the weights trainable # self.model.train() # # #", "= self.test(test_loader) # if test_score > self.best_score: # self.best_score = test_score # best_model", "logits_start = logits_start[target == 1] # # AdvLoss # if not half: #", "the model, and calculate the CE loss wrt target class 0 Parameters ----------", "# half=True): # # self.best_score = -np.inf # best_model = None # self.ascent_num_steps", "target = batch[0], batch[2] data, target = data.to(device), target.to(device) # Data Processing data", "= self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start[lab_ind]) # # logits = torch.squeeze(logits, dim=1) #", "= self.model.half_forward_end(x_adv_sampled) # else: # logits = self.model(x_adv_sampled) # # logits = torch.squeeze(logits,", "find the most optimal point in set N_i(r) classified as +ve (label=0). 
This", "= self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss = ce_loss + adv_loss", "be done loss = ce_loss return loss def one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the", "''' # if epoch >= only_ce_epochs and unl_cnt > 1: # logits_start =", "(batch_idx + 1) # Average AdvLoss # # if verbose: # test_score =", "Extract the logits for cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start)", "self.ascent_num_steps = ascent_num_steps # self.ascent_step_size = ascent_step_size # self.lamda = lamda # self.radius", "# # AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half)", "gamma * R # (set N_i(r)) # 4) Pass the calculated adversarial points", "\"cpu\") # code DROCC is borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self, model=Net,", "= x_adv + x_train_data for step in range(self.ascent_num_steps): with torch.enable_grad(): new_targets = torch.zeros(batch_size,", "label 0 # 3) Project the points between spheres of radius R and", "print('\\nBest test {}: {}'.format( # 'AUC', self.best_score # )) # # def test(self,", "most optimal point in set N_i(r) # classified as +ve (label=0). This is", "logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add to", "2) Gradient ascent to find the most optimal point in set N_i(r) #", "the positive data points (label==1). 
# ''' # if epoch >= only_ce_epochs: #", "torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss = 0 # Cross entropy Loss # #", "= logits_start[unl_ind] # # AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(),", "cross entropy loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1)", "loss = ce_loss + adv_loss * self.lam else: # If only CE based", "done by maximizing # the CE loss wrt label 0 # 3) Project", "self.model.half_forward_end(x_adv_sampled) # else: # adv_pred = self.model(x_adv_sampled) # # adv_pred = torch.squeeze(adv_pred, dim=1)", "# # for step in range(self.ascent_num_steps): # with torch.enable_grad(): # # new_targets =", "# test_score = self.test(test_loader) # if test_score > self.best_score: # self.best_score = test_score", "logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target) # # Add to", "# test_loader = torch.utils.data.DataLoader(test_data, # batch_size=batch_size, # shuffle=True) # # for epoch in", "# unl_cnt = max(unl_ind.sum(), 1) # # # Extract the logits for cross", "lr=learning_rate) # lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size,", "# AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half) else: adv_loss", "Loss # # batch_idx = -1 # for data, _, target in train_loader:", "CE based training has to be done loss = ce_loss return loss def", "adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss + adv_loss * self.lam else: #", "at random around the positive training data points 2) Gradient ascent to find", "(1 - targets).to(self.device) # new_targets = torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float) # #", "torch.no_grad(): # 
x_adv_sampled.add_(self.ascent_step_size * grad_normalized) # # if (step + 1) % 10", "is calculated only for the positive data points (label==1). # ''' # if", "= F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # # return adv_loss # # def save(self, path): #", "= copy.deepcopy(best_model) # print('\\nBest test {}: {}'.format( # 'AUC', self.best_score # )) #", "# # # Extract the logits for cross entropy loss # logits_start =", "new_targets) grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) grad_norm =", "on these to find the adversarial points # x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled", "calculate the CE loss wrt target class 0 Parameters ---------- x_train_data: Batch of", "from https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50,", "x_adv_sampled - x_train_data norm_h = torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h,", "# # def run_train(self, # train_data, # test_data, # lamda=0.5, # radius=8, #", "x_train_data, targets, half=True): # \"\"\"Computes the adversarial loss: # 1) Sample points initially", "if half: # adv_pred = self.model.half_forward_end(x_adv_sampled) # else: # adv_pred = self.model(x_adv_sampled) #", "dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] grad_norm = torch.norm(grad, p=2,", "training has to be done loss = ce_loss return loss def one_class_adv_loss(self, x_train_data,", "R and gamma * R # (set N_i(r)) # 4) Pass the calculated", "1).to(device) # # new_targets = (1 - targets).to(self.device) # new_targets = torch.squeeze(new_targets) #", "verbose: # test_score = self.test(test_loader) # if test_score > self.best_score: # self.best_score =", "= 
torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) # grad_norm =", "Net device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\") # code DROCC is borrowed", "self.best_score: # self.best_score = test_score # best_model = copy.deepcopy(self.model) # # print('Epoch: {},", "self.half = half def batch_loss(self, batch): data, target = batch[0], batch[2] data, target", "= torch.norm(grad, p=2, dim=tuple(range(1, grad.dim()))) # grad_norm = grad_norm.view(-1, *[1] * (grad.dim() -", "data, target, _ in test_loader: # batch_idx += 1 # data, target =", "ascent to find the most optimal point in set N_i(r) classified as +ve", "new_targets = torch.zeros(batch_size, 1).to(device) # new_targets = (1 - targets).to(self.device) new_targets = torch.squeeze(new_targets)", "# target = torch.squeeze(target) # # self.optimizer.zero_grad() # # # Extract the logits", "of broadcast to project h # proj = (alpha / norm_h).view(-1, *[1] *", "one_class_adv_loss(self, x_train_data, half=True): \"\"\"Computes the adversarial loss: 1) Sample points initially at random", "= ascent_num_steps self.half = half def batch_loss(self, batch): data, target = batch[0], batch[2]", "points (label==1). # ''' # if epoch >= only_ce_epochs: # logits_start = logits_start[target", "to find the adversarial points # x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv", "loss logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1) ce_loss =", "SGD on these to find the adversarial points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled =", "points around the training data # # We will perform SGD on these", "points (label==1). 
''' if self.epoch >= self.warmup_epochs: logits_start = logits_start[target == 1] #", "def test(self, test_loader, metric='AUC'): # \"\"\"Evaluate the model on the given test dataset.", "h = x_adv_sampled - x_train_data # norm_h = torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1,", "Add to the epoch variable for printing average CE Loss # epoch_ce_loss +=", "# lamda=0.5, # radius=1, # gamma=2, # verbose=False, # learning_rate=5e-4, # total_epochs=20, #", "DROCC(OCModel): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0)", "test_data, # lamda=0.5, # radius=1, # gamma=2, # verbose=False, # learning_rate=5e-4, # total_epochs=20,", "2) Gradient ascent to find the most optimal point in set N_i(r) classified", "# # batch_idx = -1 # for data, target, _ in train_loader: #", "logits = self.model(data) # logits = torch.squeeze(logits, dim=1) # sigmoid_logits = torch.sigmoid(logits) #", "1].detach(), target[target == 1], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target ==", "self.model.train() # # # Placeholder for the respective 2 loss values # epoch_adv_loss", "= len(x_train_data) # Randomly sample points around the training data # We will", "N_i(r) # h = x_adv_sampled - x_train_data # norm_h = torch.sqrt(torch.sum(h ** 2,", "(AUC / F1). # \"\"\" # self.model.eval() # label_score = [] # batch_idx", "# shuffle=True) # # for epoch in range(total_epochs): # # Make the weights", "random around the positive training # data points # 2) Gradient ascent to", "self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss += adv_loss # # loss = ce_loss +", "Compute test score # labels, scores = zip(*label_score) # labels = np.array(labels) #", "self.lam = lam self.radius = radius self.gamma = gamma self.warmup_epochs = warmup_epochs self.ascent_step_size", "for the test dataset. 
# metric: Metric used for evaluation (AUC / F1).", "h proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) h =", "-np.inf # best_model = None # self.ascent_num_steps = ascent_num_steps # self.ascent_step_size = ascent_step_size", "= F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class DROCC(nn.Module): # def __init__(self, ): #", "1], half) # epoch_adv_loss += adv_loss # # loss = ce_loss + adv_loss", "# with torch.enable_grad(): # # new_targets = torch.zeros(batch_size, 1).to(device) # # new_targets =", "else: # logits = self.model(x_adv_sampled) # # logits = torch.squeeze(logits, dim=1) # new_loss", "1].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss + adv_loss *", "# labels, scores = zip(*label_score) # labels = np.array(labels) # scores = np.array(scores)", "self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start) # # logits = torch.squeeze(logits, dim=1) # ce_loss", "done by maximizing the CE loss wrt label 0 3) Project the points", "# adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1], half) # epoch_adv_loss += adv_loss #", "= radius # self.gamma = gamma # # self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) #", "# 1) Sample points initially at random around the positive training # data", "loss wrt label 0 3) Project the points between spheres of radius R", "Loss # epoch_adv_loss = epoch_adv_loss / (batch_idx + 1) # Average AdvLoss #", "only for the positive data points (label==1). 
# ''' # if epoch >=", "broadcast to project h # proj = (alpha / norm_h).view(-1, *[1] * (h.dim()", "= new_targets.to(torch.float) if half: logits = self.model.forward_end(x_adv_sampled) else: logits = self.model(x_adv_sampled) logits =", "= ce_loss + adv_loss * self.lam else: # If only CE based training", "logits = self.model.half_forward_end(logits_start[lab_ind]) # # logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits,", "self.one_class_adv_loss(logits_start.detach(), self.half) loss = ce_loss + adv_loss * self.lam else: # If only", "def save(self, path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # # def load(self, path): #", "hyper-sphere if half: adv_pred = self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred,", "x_adv_sampled - x_train_data # norm_h = torch.sqrt(torch.sum(h ** 2, # dim=tuple(range(1, h.dim())))) #", "in train_loader: # batch_idx += 1 # data, target = data.to(device), target.to(device) #", "def one_class_adv_loss(self, x_train_data, targets, half=True): # \"\"\"Computes the adversarial loss: # 1) Sample", "# data points # 2) Gradient ascent to find the most optimal point", "data to compute loss on. # \"\"\" # batch_size = len(x_train_data) # #", "(1 - targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets = new_targets.to(torch.float) if half: logits =", "# # if verbose: # test_score = self.test(test_loader) # if test_score > self.best_score:", "on. 
# \"\"\" # batch_size = len(x_train_data) # # Randomly sample points around", "adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target == 1], half) # else: # adv_loss", "> 0.5).mean() # return test_metric # # def one_class_adv_loss(self, x_train_data, targets, half=True): #", "import torch.utils.data import torch.utils.data from models.base_models import OCModel, PUModelRandomBatch from models.classifiers import Net", "# if epoch >= only_ce_epochs: # logits_start = logits_start[target == 1] # #", "# batch_idx = -1 # for data, target, _ in test_loader: # batch_idx", "lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model, 0) self.lam = lam self.radius", "x_train_data # # for step in range(self.ascent_num_steps): # with torch.enable_grad(): # # new_targets", "= half def batch_loss(self, batch): data, target = batch[0], batch[2] data, target =", "ascent_num_steps # self.ascent_step_size = ascent_step_size # self.lamda = lamda # self.radius = radius", "N_i(r) # classified as +ve (label=0). 
This is done by maximizing # the", "= self.model(x_adv_sampled) # # adv_pred = torch.squeeze(adv_pred, dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets))", "1) unl_cnt = max(unl_ind.sum(), 1) # Extract the logits for cross entropy loss", "random around the positive training data points 2) Gradient ascent to find the", "between spheres of radius R and gamma * R (set N_i(r)) 4) Pass", "N_i(r)) 4) Pass the calculated adversarial points through the model, and calculate the", "logits = self.model(x_adv_sampled) # # logits = torch.squeeze(logits, dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits,", "class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001, ascent_num_steps=50, half=True): super().__init__(model,", "loss wrt target class 0 Parameters ---------- x_train_data: Batch of data to compute", "the adversarial loss: # 1) Sample points initially at random around the positive", "# logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start[lab_ind]) # # logits = torch.squeeze(logits,", "data.to(torch.float) target = target.to(torch.float) target = torch.squeeze(target) # Extract the logits for cross", "torch.squeeze(target) # Extract the logits for cross entropy loss logits_start = self.model.forward_start(data) logits", "# # if half: # logits = self.model.half_forward_end(x_adv_sampled) # else: # logits =", "torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to the epoch variable for", "# # def test(self, test_loader, metric='AUC'): # \"\"\"Evaluate the model on the given", "to find the most optimal point in set N_i(r) classified as +ve (label=0).", "Sample points initially at random around the positive training data points 2) Gradient", "variable for printing average CE Loss # epoch_ce_loss += ce_loss # # '''", "---------- x_train_data: Batch of data to 
compute loss on. \"\"\" batch_size = len(x_train_data)", "if not self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half) loss", "# # logits = torch.squeeze(logits, dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) # #", "epoch_ce_loss += ce_loss # # ''' # Adversarial Loss is calculated only for", "self.model.half_forward_end(logits_start) # # logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target) #", "class PU_DROCC(nn.Module): # def __init__(self, ): # super().__init__() # # self.model = CIFAR10_LeNet()", "= F.binary_cross_entropy_with_logits(logits, new_targets) # # grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm = torch.norm(grad,", "self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader =", "loss on. 
\"\"\" batch_size = len(x_train_data) # Randomly sample points around the training", "1], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1], half) #", "if torch.cuda.is_available() else \"cpu\") # code DROCC is borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel):", "new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) # # grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm =", "model, # and calculate the CE loss wrt target class 0 # #", "model, and calculate the CE loss wrt target class 0 Parameters ---------- x_train_data:", "= data.to(torch.float) target = target.to(torch.float) target = torch.squeeze(target) # Extract the logits for", "# learning_rate=1e-3, # total_epochs=30, # only_ce_epochs=6, # ascent_step_size=0.001, # ascent_num_steps=50, # gamma_lr=1, #", "'model.pt')) # # def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self,", "norm_h).view(-1, *[1] * (h.dim() - 1)) h = proj * h x_adv_sampled =", "(label=0). This is done by maximizing the CE loss wrt label 0 3)", "find the adversarial points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv + x_train_data for", "evaluation (AUC / F1). 
# \"\"\" # self.model.eval() # label_score = [] #", "adversarial points x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv + x_train_data for step in", "= self.one_class_adv_loss(logits_start.detach(), target[target == 1], half) # epoch_adv_loss += adv_loss # # loss", "Project the points between spheres of radius R and gamma * R (set", "torch.squeeze(logits, dim=1) # sigmoid_logits = torch.sigmoid(logits) # scores = logits # label_score +=", "# train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True) # # test_loader = torch.utils.data.DataLoader(test_data,", "for step in range(self.ascent_num_steps): with torch.enable_grad(): new_targets = torch.zeros(batch_size, 1).to(device) # new_targets =", "+ adv_loss * self.lamda # else: # # If only CE based training", "= data.to(device), target.to(device) # data = data.to(torch.float) # target = target.to(torch.float) # target", "# if (step + 1) % 10 == 0: # # Project the", "DROCC is borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2,", "= self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss #", "# class PU_DROCC(nn.Module): # def __init__(self, ): # super().__init__() # # self.model =", "# else: # logits = self.model(x_adv_sampled) # # logits = torch.squeeze(logits, dim=1) #", "if not self.half: adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start.detach(),", "path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6,", "# only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512, # half=True): #", "half: adv_pred = 
self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss", "-1 # for data, target, _ in test_loader: # batch_idx += 1 #", "# # We will perform SGD on these to find the adversarial points", "# if verbose: # self.model = copy.deepcopy(best_model) # print('\\nBest test {}: {}'.format( #", "x_train_data norm_h = torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h, self.radius, self.gamma", "Project the normal points to the set N_i(r) # h = x_adv_sampled -", "surface of hyper-sphere if half: adv_pred = self.model.forward_end(x_adv_sampled) else: adv_pred = self.model(x_adv_sampled) adv_pred", "*[1] * (grad.dim() - 1)) grad_normalized = grad / grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size", "# for epoch in range(total_epochs): # # Make the weights trainable # self.model.train()", "gamma * R (set N_i(r)) 4) Pass the calculated adversarial points through the", "logits = torch.squeeze(logits, dim=1) # new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) # # grad =", "self.ascent_step_size = ascent_step_size # self.lamda = lamda # self.radius = radius # self.gamma", "(grad.dim() - 1)) # grad_normalized = grad / grad_norm # with torch.no_grad(): #", "logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits,", "# Adversarial Loss is calculated only for the positive data points (label==1). #", "CE Loss ''' Adversarial Loss is calculated only for the positive data points", "import torch.nn.functional as F import torch.utils.data import torch.utils.data from models.base_models import OCModel, PUModelRandomBatch", "# # def save(self, path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # # def load(self,", "data points (label==1). 
# ''' # if epoch >= only_ce_epochs: # logits_start =", "the positive data points (label==1). ''' if self.epoch >= self.warmup_epochs and unl_cnt >", "* R (set N_i(r)) 4) Pass the calculated adversarial points through the model,", "import torch.utils.data from models.base_models import OCModel, PUModelRandomBatch from models.classifiers import Net device =", "h = proj * h x_adv_sampled = x_train_data + h # These adv_points", "# Backprop # loss.backward() # self.optimizer.step() # # epoch_ce_loss = epoch_ce_loss / (batch_idx", "= 0 # Cross entropy Loss # # batch_idx = -1 # for", "(alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) # h = proj *", "points to the set N_i(r) h = x_adv_sampled - x_train_data norm_h = torch.sqrt(torch.sum(h", "radius R and gamma * R # (set N_i(r)) # 4) Pass the", "torch.squeeze(new_targets) # new_targets = new_targets.to(torch.float) # # if half: # logits = self.model.half_forward_end(x_adv_sampled)", "*[1] * (grad.dim() - 1)) # grad_normalized = grad / grad_norm # with", "borrowed from https://github.com/microsoft/EdgeML class DROCC(OCModel): def __init__(self, model=Net, lam=0.5, radius=8, gamma=2, warmup_epochs=6, ascent_step_size=0.001,", "else: adv_pred = self.model(x_adv_sampled) adv_pred = torch.squeeze(adv_pred, dim=1) adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return", "= proj * h x_adv_sampled = x_train_data + h # These adv_points are", "radius self.gamma = gamma self.warmup_epochs = warmup_epochs self.ascent_step_size = ascent_step_size self.ascent_num_steps = ascent_num_steps", "(set N_i(r)) 4) Pass the calculated adversarial points through the model, and calculate", "0 # 3) Project the points between spheres of radius R and gamma", "# 2) Gradient ascent to find the most optimal point in set N_i(r)", "ascent to find the most optimal point in set N_i(r) # classified as", "the points between spheres of radius R and gamma * R # (set", "will perform SGD on these to find the adversarial points x_adv = 
torch.randn(x_train_data.shape).to(device).detach().requires_grad_()", "adversarial points # x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv + x_train_data #", "half=True): \"\"\"Computes the adversarial loss: 1) Sample points initially at random around the", "= -np.inf # best_model = None # self.ascent_num_steps = ascent_num_steps # self.ascent_step_size =", "lamda # self.radius = radius # self.gamma = gamma # # self.optimizer =", "self.best_score # )) # # def test(self, test_loader, metric='AUC'): # \"\"\"Evaluate the model", "# # print('Epoch: {}, CE Loss: {}, AdvLoss: {}, {}: {}'.format( # epoch,", "If only CE based training has to be done # loss = ce_loss", "# batch_size=128, # half=True): # # self.best_score = -np.inf # best_model = None", "= torch.randn(x_train_data.shape).to(device).detach().requires_grad_() # x_adv_sampled = x_adv + x_train_data # # for step in", "object for the test dataset. # metric: Metric used for evaluation (AUC /", "# Compute test score # labels, scores = zip(*label_score) # labels = np.array(labels)", "test_loader: Dataloader object for the test dataset. # metric: Metric used for evaluation", "the most optimal point in set N_i(r) # classified as +ve (label=0). 
This", "# # adv_pred = torch.squeeze(adv_pred, dim=1) # adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets)) # #", "# new_targets = torch.zeros(batch_size, 1).to(device) # # new_targets = (1 - targets).to(self.device) #", "# self.gamma * self.radius).to(device) # # Make use of broadcast to project h", "torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv + x_train_data for step in range(self.ascent_num_steps): with torch.enable_grad(): new_targets", "3) Project the points between spheres of radius R and gamma * R", "# These adv_points are now on the surface of hyper-sphere if half: adv_pred", "= -1 # for data, _, target in train_loader: # batch_idx += 1", "Data Processing # data = data.to(torch.float) # target = target.to(torch.float) # target =", "% 10 == 0: # Project the normal points to the set N_i(r)", "os.path.join(path, 'model.pt')) # # def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt'))) class PU_DROCC(PUModelRandomBatch): def", "# epoch_ce_loss += ce_loss # # ''' # Adversarial Loss is calculated only", "for printing average CE Loss # epoch_ce_loss += ce_loss # # ''' #", "loss wrt target class 0 # # Parameters # ---------- # x_train_data: Batch", "epoch_ce_loss = epoch_ce_loss / (batch_idx + 1) # Average CE Loss # epoch_adv_loss", "the positive training data points 2) Gradient ascent to find the most optimal", "# # logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target) # #", "max(unl_ind.sum(), 1) # # # Extract the logits for cross entropy loss #", "AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss = self.one_class_adv_loss(logits_start[unl_ind].detach(), self.half)", "dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target) # # Add to the epoch variable", "proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) # h =", "optimal point in set 
N_i(r) classified as +ve (label=0). This is done by", "self.optimizer.zero_grad() # # lab_ind = target == 1 # unl_ind = target ==", "# if half: # adv_pred = self.model.half_forward_end(x_adv_sampled) # else: # adv_pred = self.model(x_adv_sampled)", "if verbose: # test_score = self.test(test_loader) # if test_score > self.best_score: # self.best_score", "= self.model.forward_start(data) logits = self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind])", "= (alpha / norm_h).view(-1, *[1] * (h.dim() - 1)) # h = proj", "# ''' # Adversarial Loss is calculated only for the positive data points", "adv_loss # # def save(self, path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # # def", "# self.best_score = -np.inf # best_model = None # self.ascent_num_steps = ascent_num_steps #", "# epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss = 0 # Cross entropy", "F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class PU_DROCC(nn.Module): # def __init__(self, ): # super().__init__()", "*[1] * (h.dim() - 1)) h = proj * h x_adv_sampled = x_train_data", "x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_() x_adv_sampled = x_adv + x_train_data for step in range(self.ascent_num_steps): with", "# # grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0] # grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim())))", "epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score)) # lr_scheduler.step() # if verbose: # self.model =", "adv_loss # class DROCC(nn.Module): # def __init__(self, ): # super().__init__() # # self.model", "cross entropy loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start[lab_ind]) # #", "# self.model = CIFAR10_LeNet() # # def run_train(self, # train_data, # test_data, #", "gamma # # 
self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) #", "lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr) # # train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True)", "on. \"\"\" batch_size = len(x_train_data) # Randomly sample points around the training data", "copy.deepcopy(self.model) # # print('Epoch: {}, CE Loss: {}, AdvLoss: {}, {}: {}'.format( #", "** 2, # dim=tuple(range(1, h.dim())))) # alpha = torch.clamp(norm_h, self.radius, # self.gamma *", "= torch.zeros(batch_size, 1).to(device) # # new_targets = (1 - targets).to(self.device) # new_targets =", "the CE loss wrt target class 0 # # Parameters # ---------- #", "\"\"\" # self.model.eval() # label_score = [] # batch_idx = -1 # for", "# logits = torch.squeeze(logits, dim=1) # ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add", "initially at random around the positive training data points 2) Gradient ascent to", "the model, # and calculate the CE loss wrt target class 0 #", "CIFAR10_LeNet() # # def run_train(self, # train_data, # test_data, # lamda=0.5, # radius=1,", "calculated adversarial points through the model, and calculate the CE loss wrt target", "test(self, test_loader, metric='AUC'): # \"\"\"Evaluate the model on the given test dataset. 
#", "# self.best_score = test_score # best_model = copy.deepcopy(self.model) # # print('Epoch: {}, CE", "# new_targets = new_targets.to(torch.float) # # if half: # logits = self.model.half_forward_end(x_adv_sampled) #", "respective 2 loss values # epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss # epoch_ce_loss =", "torch.enable_grad(): # # new_targets = torch.zeros(batch_size, 1).to(device) # # new_targets = (1 -", "radius # self.gamma = gamma # # self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate) # lr_scheduler", "x_train_data: Batch of data to compute loss on. # \"\"\" # batch_size =", "for data, target, _ in test_loader: # batch_idx += 1 # data, target", "# if not half: # adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else: #", "# # ''' # Adversarial Loss is calculated only for the positive data", "- x_train_data norm_h = torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim())))) alpha = torch.clamp(norm_h, self.radius,", "Make the weights trainable # self.model.train() # # # Placeholder for the respective", "dataset. # Parameters # ---------- # test_loader: Dataloader object for the test dataset.", "# Parameters # ---------- # test_loader: Dataloader object for the test dataset. #", "self.epoch >= self.warmup_epochs and unl_cnt > 1: logits_start = logits_start[unl_ind] # AdvLoss if", "Parameters # ---------- # test_loader: Dataloader object for the test dataset. 
# metric:", "CE Loss # epoch_ce_loss += ce_loss # # ''' # Adversarial Loss is", "= self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) #", "-1 # for data, target, _ in train_loader: # batch_idx += 1 #", "0 3) Project the points between spheres of radius R and gamma *", "logits_start[unl_ind] # AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half) else: adv_loss =", "x_train_data + h # These adv_points are now on the surface of hyper-sphere", "self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half) # else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss", "for the positive data points (label==1). # ''' # if epoch >= only_ce_epochs", "set N_i(r) h = x_adv_sampled - x_train_data norm_h = torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1,", "target == 0 # lab_cnt = max(lab_ind.sum(), 1) unl_cnt = max(unl_ind.sum(), 1) #", "* self.lamda # else: # # If only CE based training has to", "# If only CE based training has to be done # loss =", "through the model, and calculate the CE loss wrt target class 0 Parameters", "only_ce_epochs: # logits_start = logits_start[target == 1] # # AdvLoss # if not", "== 0 # # # lab_cnt = max(lab_ind.sum(), 1) # unl_cnt = max(unl_ind.sum(),", "self.model.eval() # label_score = [] # batch_idx = -1 # for data, target,", "adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets) return adv_loss # class PU_DROCC(nn.Module): # def __init__(self, ):", "batch_size = len(x_train_data) # Randomly sample points around the training data # We", "be done # loss = ce_loss # # # Backprop # loss.backward() #", "scores) # if metric == 'alpha': # test_metric = (scores > 0.5).mean() #", "= target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad() # # # Extract", "= CIFAR10_LeNet() # # def run_train(self, # 
train_data, # test_data, # lamda=0.5, #", "batch_idx += 1 # data, target = data.to(device), target.to(device) # # Data Processing", "by maximizing # the CE loss wrt label 0 # 3) Project the", "target) # # Add to the epoch variable for printing average CE Loss", "x_train_data: Batch of data to compute loss on. \"\"\" batch_size = len(x_train_data) #", "data points (label==1). ''' if self.epoch >= self.warmup_epochs: logits_start = logits_start[target == 1]", "find the most optimal point in set N_i(r) # classified as +ve (label=0).", "logits for cross entropy loss # logits_start = self.model.half_forward_start(data) # logits = self.model.half_forward_end(logits_start[lab_ind])", "+ adv_loss * self.lam else: # If only CE based training has to", "printing average CE Loss # epoch_ce_loss += ce_loss # # ''' # Adversarial", "dim=tuple(range(1, grad.dim()))) grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1)) grad_normalized = grad", "the model on the given test dataset. # Parameters # ---------- # test_loader:", "# # train_loader = torch.utils.data.DataLoader(train_data, # batch_size=batch_size, # shuffle=True) # # test_loader =", "for data, _, target in train_loader: # batch_idx += 1 # data, target", "if self.epoch >= self.warmup_epochs: logits_start = logits_start[target == 1] # AdvLoss if not", "target = target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad() # # lab_ind", "# # If only CE based training has to be done # loss", "{}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score)) # lr_scheduler.step() # if verbose:", "self.half) loss = ce_loss + adv_loss * self.lam else: # If only CE", "AdvLoss if not self.half: adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half) else: adv_loss =", "unl_cnt = max(unl_ind.sum(), 1) # # # Extract the logits for cross entropy", "epoch_adv_loss.item(), # 'AUC', test_score)) # lr_scheduler.step() # if verbose: # self.model = 
copy.deepcopy(best_model)", "dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to the epoch variable for printing", "max(lab_ind.sum(), 1) # unl_cnt = max(unl_ind.sum(), 1) # # # Extract the logits", "positive training # data points # 2) Gradient ascent to find the most", "# logits = self.model.half_forward_end(logits_start[lab_ind]) # # logits = torch.squeeze(logits, dim=1) # ce_loss =", "# target = target.to(torch.float) # target = torch.squeeze(target) # # logits = self.model(data)", "verbose: # self.model = copy.deepcopy(best_model) # print('\\nBest test {}: {}'.format( # 'AUC', self.best_score", "h.dim())))) alpha = torch.clamp(norm_h, self.radius, self.gamma * self.radius).to(device) # Make use of broadcast", "= torch.zeros(batch_size, 1).to(device) # new_targets = (1 - targets).to(self.device) new_targets = torch.squeeze(new_targets) new_targets", "h = x_adv_sampled - x_train_data norm_h = torch.sqrt(torch.sum(h ** 2, dim=tuple(range(1, h.dim())))) alpha", "= grad_norm.view(-1, *[1] * (grad.dim() - 1)) grad_normalized = grad / grad_norm with", "self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target) #", "= ascent_step_size self.ascent_num_steps = ascent_num_steps self.half = half def batch_loss(self, batch): data, target", "- 1)) grad_normalized = grad / grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if", "# (set N_i(r)) # 4) Pass the calculated adversarial points through the model,", "else: # adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss += adv_loss # #", "= self.model.forward_end(logits_start[lab_ind]) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # Add to", "== 1], half) # epoch_adv_loss += adv_loss # # loss = ce_loss +", "maximizing the CE loss wrt 
label 0 3) Project the points between spheres", "adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1], half) # epoch_adv_loss += adv_loss # #", "if not half: # adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target == 1], half)", "= self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half) # epoch_adv_loss += adv_loss # # loss = ce_loss", "save(self, path): # torch.save(self.model.state_dict(), os.path.join(path, 'model.pt')) # # def load(self, path): # self.model.load_state_dict(torch.load(os.path.join(path,", "This is done by maximizing # the CE loss wrt label 0 #", "# test_data, # lamda=0.5, # radius=1, # gamma=2, # verbose=False, # learning_rate=5e-4, #", "# \"\"\" # batch_size = len(x_train_data) # # Randomly sample points around the", "unl_ind = target == 0 # # # lab_cnt = max(lab_ind.sum(), 1) #", "logits = self.model(x_adv_sampled) logits = torch.squeeze(logits, dim=1) new_loss = F.binary_cross_entropy_with_logits(logits, new_targets) grad =", "CE loss wrt target class 0 # # Parameters # ---------- # x_train_data:", "has to be done # loss = ce_loss # # # Backprop #", "We will perform SGD on these to find the adversarial points # x_adv", "logits_start = self.model.forward_start(data) logits = self.model.forward_end(logits_start) logits = torch.squeeze(logits, dim=1) ce_loss = F.binary_cross_entropy_with_logits(logits,", "if (step + 1) % 10 == 0: # Project the normal points", "# # AdvLoss # if not half: # adv_loss = self.one_class_adv_loss(data[target == 1].detach(),", "{}, {}: {}'.format( # epoch, epoch_ce_loss.item(), epoch_adv_loss.item(), # 'AUC', test_score)) # lr_scheduler.step() #", "= target.to(torch.float) # target = torch.squeeze(target) # # self.optimizer.zero_grad() # # lab_ind =", "ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind]) # # Add to the epoch variable for printing", "are now on the surface of hyper-sphere if half: adv_pred = 
self.model.forward_end(x_adv_sampled) else:", "wrt target class 0 # # Parameters # ---------- # x_train_data: Batch of", "# batch_idx += 1 # data, target = data.to(device), target.to(device) # # Data", "ce_loss # # # Backprop # loss.backward() # self.optimizer.step() # # epoch_ce_loss =", "* self.radius).to(device) # Make use of broadcast to project h proj = (alpha", "# total_epochs=20, # only_ce_epochs=2, # ascent_step_size=5e-6, # ascent_num_steps=10, # gamma_lr=0.96, # batch_size=512, #", "= grad / grad_norm with torch.no_grad(): x_adv_sampled.add_(self.ascent_step_size * grad_normalized) if (step + 1)" ]
[ "async def read_from_socket(host, port): timer = 0 reader, writer = None, None async", "writer.write('\\n'.encode()) await register(reader, writer, args) else: await authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', '", "reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong token, let's get another one\") await", "it's oblicated for first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file with token\") parser.add_argument('--text',", "== 'null\\n': logging.warning(\"Wrong token, let's get another one\") await register(reader, writer, args) async", "%s seconds', 2 ** timer) await asyncio.sleep(2 ** timer) timer += 1 except", "if you do not have the correct token file\") logging.error('exiting') sys.exit() temp =", "= datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s", "submit_message(host, port, args): timer = 0 try: async with AIOFile(args.token_file, 'r') as _file:", "correct token file\") logging.error('exiting') sys.exit() temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n',", "mode\") args = parser.parse_args() tasks = [] if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if", "another one\") await register(reader, writer, args) async def main(): logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect", "with AIOFile(\"text.txt\", 'a') as _file: while True: try: if not reader or not", "seconds', 2 ** timer) await asyncio.sleep(2 ** timer) timer += 1 except asyncio.CancelledError:", "asyncio.CancelledError: writer.close() raise async def submit_message(host, port, args): timer = 0 try: 
async", "args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task in tasks: await task if", "asyncio.CancelledError: writer.close() raise async def register(reader, writer, args): if not args.user: logging.error(\"It's obligated", "writer, args): if not args.user: logging.error(\"It's obligated to specidy login if you do", "timer = 0 reader, writer = None, None async with AIOFile(\"text.txt\", 'a') as", "writer.close() raise async def register(reader, writer, args): if not args.user: logging.error(\"It's obligated to", "def authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) if", "token = await _file.read() except FileNotFoundError: token = None while True: try: reader,", "writer = await asyncio.open_connection(host=host, port=port) text = await reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\")", "try: if not reader or not writer: reader, writer = await asyncio.open_connection(host=host, port=port)", "logging import datetime import sys import json from aiofile import AIOFile async def", "register(reader, writer, args) async def main(): logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to secret chat')", "read_from_socket(host, port): timer = 0 reader, writer = None, None async with AIOFile(\"text.txt\",", "= parser.parse_args() tasks = [] if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host,", "writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has been successfully sent') return except", "reader, writer = await asyncio.open_connection(host=host, port=port) text = await reader.readline() time_now = 
datetime.datetime.now().strftime(\"%y.%m.%d", "parser.add_argument('--user', help=\"set a username, it's oblicated for first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a", "tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task in tasks: await task", "time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep", "logging.error(\"It's obligated to specidy login if you do not have the correct token", "logging.debug(token) async with AIOFile(args.token_file, 'w') as _file: await _file.write(token) async def authorise(reader, writer,", "help=\"set a file with token\") parser.add_argument('--text', help=\"set a text to send\") parser.add_argument('--send_only', action='store_true',", "'a') as _file: while True: try: if not reader or not writer: reader,", "async with AIOFile(args.token_file, 'r') as _file: token = await _file.read() except FileNotFoundError: token", "2 ** timer) await asyncio.sleep(2 ** timer) reader, writer = None, None timer", "async def authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\"))", "= json.loads(answer) token = answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file, 'w') as _file: await", "to connect') parser.add_argument('--rport', default=5000, type=int, help='Specify port to receive msg') parser.add_argument('--sport', default=5050, type=int,", "while True: try: reader, writer = await asyncio.open_connection(host=host, port=port) temp = await reader.readline()", "'r') as _file: token = await _file.read() except FileNotFoundError: token = None while", "None 
timer += 1 except asyncio.CancelledError: writer.close() raise async def submit_message(host, port, args):", "token file\") logging.error('exiting') sys.exit() temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', '", "'null\\n': logging.warning(\"Wrong token, let's get another one\") await register(reader, writer, args) async def", "send only mode\") args = parser.parse_args() tasks = [] if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host,", "you do not have the correct token file\") logging.error('exiting') sys.exit() temp = await", "await asyncio.sleep(2 ** timer) reader, writer = None, None timer += 1 except", "+= 1 except asyncio.CancelledError: writer.close() raise async def submit_message(host, port, args): timer =", "token = None while True: try: reader, writer = await asyncio.open_connection(host=host, port=port) temp", "= await reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except", "to receive msg') parser.add_argument('--sport', default=5050, type=int, help='Specify port to send msg') parser.add_argument('--user', help=\"set", "sys.exit() temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer", "args): if not args.user: logging.error(\"It's obligated to specidy login if you do not", "await register(reader, writer, args) async def main(): logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to secret", "def main(): logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to", "await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token: 
writer.write('\\n'.encode()) await register(reader, writer, args) else: await", "argparse import asyncio import logging import datetime import sys import json from aiofile", "asyncio.open_connection(host=host, port=port) temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token: writer.write('\\n'.encode()) await register(reader,", "to specidy login if you do not have the correct token file\") logging.error('exiting')", "await authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has been successfully sent')", "answer_dict = json.loads(answer) token = answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file, 'w') as _file:", "a text to send\") parser.add_argument('--send_only', action='store_true', help=\"set a send only mode\") args =", "datetime import sys import json from aiofile import AIOFile async def read_from_socket(host, port):", "token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has been successfully sent') return except (ConnectionRefusedError, ConnectionResetError):", "args))) for task in tasks: await task if __name__ == \"__main__\": try: asyncio.run(main())", "to send msg') parser.add_argument('--user', help=\"set a username, it's oblicated for first run\") parser.add_argument('--token_file',", "logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2 ** timer) timer += 1", "import datetime import sys import json from aiofile import AIOFile async def read_from_socket(host,", "parser = argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport', default=5000,", "timer) timer += 1 except asyncio.CancelledError: writer.close() raise async def register(reader, writer, args):", "help=\"set a text to send\") parser.add_argument('--send_only', 
action='store_true', help=\"set a send only mode\") args", "if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task in", "logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong token, let's get another one\") await register(reader,", "except asyncio.CancelledError: writer.close() raise async def submit_message(host, port, args): timer = 0 try:", "= await reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer = await", "+= 1 except asyncio.CancelledError: writer.close() raise async def register(reader, writer, args): if not", "for first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file with token\") parser.add_argument('--text', help=\"set a", "default=\"token.txt\", help=\"set a file with token\") parser.add_argument('--text', help=\"set a text to send\") parser.add_argument('--send_only',", "= await asyncio.open_connection(host=host, port=port) text = await reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await", "writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong token,", "authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has been successfully sent') return", "AIOFile(\"text.txt\", 'a') as _file: while True: try: if not reader or not writer:", "tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task in tasks: await task if __name__ == \"__main__\":", "aiofile import AIOFile async def read_from_socket(host, port): timer = 0 reader, writer =", "tasks = [] if not 
args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args)))", "writer.close() raise async def submit_message(host, port, args): timer = 0 try: async with", "port=port) text = await reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\")))", "** timer) await asyncio.sleep(2 ** timer) reader, writer = None, None timer +=", "True: try: reader, writer = await asyncio.open_connection(host=host, port=port) temp = await reader.readline() logging.debug(temp.decode(\"utf-8\"))", "help='Host to connect') parser.add_argument('--rport', default=5000, type=int, help='Specify port to receive msg') parser.add_argument('--sport', default=5050,", "timer) await asyncio.sleep(2 ** timer) timer += 1 except asyncio.CancelledError: writer.close() raise async", "args) else: await authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has been", "a username, it's oblicated for first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file with", "_file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer)", "get another one\") await register(reader, writer, args) async def main(): logging.basicConfig(level=logging.INFO) parser =", "try: reader, writer = await asyncio.open_connection(host=host, port=port) temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) if", "register(reader, writer, args): if not args.user: logging.error(\"It's obligated to specidy login if you", "{}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s 
seconds', 2 ** timer) await", "= answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file, 'w') as _file: await _file.write(token) async def", "msg') parser.add_argument('--user', help=\"set a username, it's oblicated for first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set", "token: writer.write('\\n'.encode()) await register(reader, writer, args) else: await authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n',", "port to receive msg') parser.add_argument('--sport', default=5050, type=int, help='Specify port to send msg') parser.add_argument('--user',", "file\") logging.error('exiting') sys.exit() temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', ' '))", "timer += 1 except asyncio.CancelledError: writer.close() raise async def register(reader, writer, args): if", "register(reader, writer, args) else: await authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text", "writer: reader, writer = await asyncio.open_connection(host=host, port=port) text = await reader.readline() time_now =", "1 except asyncio.CancelledError: writer.close() raise async def submit_message(host, port, args): timer = 0", "import asyncio import logging import datetime import sys import json from aiofile import", "'w') as _file: await _file.write(token) async def authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode())", "AIOFile async def read_from_socket(host, port): timer = 0 reader, writer = None, None", "not token: writer.write('\\n'.encode()) await register(reader, writer, args) else: await authorise(reader, writer, args, token)", "text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await 
asyncio.sleep(2", "datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds',", "authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\")", "= None while True: try: reader, writer = await asyncio.open_connection(host=host, port=port) temp =", "as _file: while True: try: if not reader or not writer: reader, writer", "default=5050, type=int, help='Specify port to send msg') parser.add_argument('--user', help=\"set a username, it's oblicated", "do not have the correct token file\") logging.error('exiting') sys.exit() temp = await reader.readline()", "asyncio import logging import datetime import sys import json from aiofile import AIOFile", "default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport', default=5000, type=int, help='Specify port to receive msg') parser.add_argument('--sport',", "seconds', 2 ** timer) await asyncio.sleep(2 ** timer) reader, writer = None, None", "raise async def submit_message(host, port, args): timer = 0 try: async with AIOFile(args.token_file,", "writer = await asyncio.open_connection(host=host, port=port) temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token:", "answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer) token = answer_dict['account_hash'] logging.debug(token) async", "except FileNotFoundError: token = None while True: try: reader, writer = await asyncio.open_connection(host=host,", "with AIOFile(args.token_file, 'w') as _file: await _file.write(token) async def authorise(reader, writer, args, token):", "reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await 
_file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError):", "')) writer.write(user.encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer) token = answer_dict['account_hash']", "receive msg') parser.add_argument('--sport', default=5050, type=int, help='Specify port to send msg') parser.add_argument('--user', help=\"set a", "_file: while True: try: if not reader or not writer: reader, writer =", "writer.write(user.encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer) token = answer_dict['account_hash'] logging.debug(token)", "= await asyncio.open_connection(host=host, port=port) temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token: writer.write('\\n'.encode())", "if not args.user: logging.error(\"It's obligated to specidy login if you do not have", "' ')).encode()) logging.info('text has been successfully sent') return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s", "_file: await _file.write(token) async def authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer =", "= argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport', default=5000, type=int,", "args.sport, args))) for task in tasks: await task if __name__ == \"__main__\": try:", "= 0 try: async with AIOFile(args.token_file, 'r') as _file: token = await _file.read()", "username, it's oblicated for first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file with token\")", "if not token: writer.write('\\n'.encode()) await register(reader, writer, args) else: await authorise(reader, writer, args,", "= 
'{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer)", "= None, None timer += 1 except asyncio.CancelledError: writer.close() raise async def submit_message(host,", "** timer) reader, writer = None, None timer += 1 except asyncio.CancelledError: writer.close()", "asyncio.sleep(2 ** timer) timer += 1 except asyncio.CancelledError: writer.close() raise async def register(reader,", "file with token\") parser.add_argument('--text', help=\"set a text to send\") parser.add_argument('--send_only', action='store_true', help=\"set a", "= None, None async with AIOFile(\"text.txt\", 'a') as _file: while True: try: if", "answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong token, let's get another one\") await register(reader, writer, args)", "temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token: writer.write('\\n'.encode()) await register(reader, writer, args)", "logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer) token = answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file, 'w') as", "def register(reader, writer, args): if not args.user: logging.error(\"It's obligated to specidy login if", "import logging import datetime import sys import json from aiofile import AIOFile async", "if not reader or not writer: reader, writer = await asyncio.open_connection(host=host, port=port) text", "msg') parser.add_argument('--sport', default=5050, type=int, help='Specify port to send msg') parser.add_argument('--user', help=\"set a username,", "send\") parser.add_argument('--send_only', action='store_true', help=\"set a send only mode\") args = parser.parse_args() tasks =", "token = answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file, 'w') as _file: await _file.write(token) async", "async def register(reader, writer, args): if not args.user: 
logging.error(\"It's obligated to specidy login", "args.user: logging.error(\"It's obligated to specidy login if you do not have the correct", "asyncio.sleep(2 ** timer) reader, writer = None, None timer += 1 except asyncio.CancelledError:", "def read_from_socket(host, port): timer = 0 reader, writer = None, None async with", "logging.info('text has been successfully sent') return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2", "logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict", "reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\"))", "help=\"set a send only mode\") args = parser.parse_args() tasks = [] if not", "parser.add_argument('--sport', default=5050, type=int, help='Specify port to send msg') parser.add_argument('--user', help=\"set a username, it's", "(ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2 ** timer) reader,", "%H.%M\") await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2", "** timer) timer += 1 except asyncio.CancelledError: writer.close() raise async def register(reader, writer,", "None, None timer += 1 except asyncio.CancelledError: writer.close() raise async def submit_message(host, port,", "async with AIOFile(args.token_file, 'w') as _file: await _file.write(token) async def authorise(reader, writer, args,", "a send only mode\") args = parser.parse_args() tasks = [] if not args.send_only:", "await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 
'null\\n': logging.warning(\"Wrong token, let's get another one\")", "or not writer: reader, writer = await asyncio.open_connection(host=host, port=port) text = await reader.readline()", "'{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer) token", "import json from aiofile import AIOFile async def read_from_socket(host, port): timer = 0", "= await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token: writer.write('\\n'.encode()) await register(reader, writer, args) else:", "except asyncio.CancelledError: writer.close() raise async def register(reader, writer, args): if not args.user: logging.error(\"It's", "as _file: await _file.write(token) async def authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer", "0 reader, writer = None, None async with AIOFile(\"text.txt\", 'a') as _file: while", "await _file.write(token) async def authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await", "')).encode()) logging.info('text has been successfully sent') return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds',", "text to send\") parser.add_argument('--send_only', action='store_true', help=\"set a send only mode\") args = parser.parse_args()", "parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file with token\") parser.add_argument('--text', help=\"set a text to send\")", "in tasks: await task if __name__ == \"__main__\": try: asyncio.run(main()) except KeyboardInterrupt: pass", "successfully sent') return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await", "help='Specify port to send msg') parser.add_argument('--user', help=\"set a username, it's oblicated for first", "0 try: async with 
AIOFile(args.token_file, 'r') as _file: token = await _file.read() except", "AIOFile(args.token_file, 'w') as _file: await _file.write(token) async def authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n',", "[] if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task", "to secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport', default=5000, type=int, help='Specify port", "for task in tasks: await task if __name__ == \"__main__\": try: asyncio.run(main()) except", "writer = None, None async with AIOFile(\"text.txt\", 'a') as _file: while True: try:", "reader or not writer: reader, writer = await asyncio.open_connection(host=host, port=port) text = await", "parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport', default=5000, type=int, help='Specify port to receive msg')", "writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has been successfully sent') return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep", "json from aiofile import AIOFile async def read_from_socket(host, port): timer = 0 reader,", "timer) await asyncio.sleep(2 ** timer) reader, writer = None, None timer += 1", "reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token: writer.write('\\n'.encode()) await register(reader, writer, args) else: await authorise(reader,", "port to send msg') parser.add_argument('--user', help=\"set a username, it's oblicated for first run\")", "%s seconds', 2 ** timer) await asyncio.sleep(2 ** timer) reader, writer = None,", "2 ** timer) await asyncio.sleep(2 ** timer) timer += 1 except asyncio.CancelledError: writer.close()", "** timer) await asyncio.sleep(2 ** timer) timer += 1 except 
asyncio.CancelledError: writer.close() raise", "have the correct token file\") logging.error('exiting') sys.exit() temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) user", "main(): logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect')", "import sys import json from aiofile import AIOFile async def read_from_socket(host, port): timer", "run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file with token\") parser.add_argument('--text', help=\"set a text to", "to send\") parser.add_argument('--send_only', action='store_true', help=\"set a send only mode\") args = parser.parse_args() tasks", "async def main(): logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host", "None async with AIOFile(\"text.txt\", 'a') as _file: while True: try: if not reader", "args): timer = 0 try: async with AIOFile(args.token_file, 'r') as _file: token =", "_file: token = await _file.read() except FileNotFoundError: token = None while True: try:", "token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong", "writer, args) else: await authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has", "type=int, help='Specify port to send msg') parser.add_argument('--user', help=\"set a username, it's oblicated for", "port, args): timer = 0 try: async with AIOFile(args.token_file, 'r') as _file: token", "await _file.read() except FileNotFoundError: token = None while True: try: reader, writer =", "asyncio.open_connection(host=host, port=port) text = await 
reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}] {}'.format(time_now,", "not writer: reader, writer = await asyncio.open_connection(host=host, port=port) text = await reader.readline() time_now", "if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task in tasks: await task if __name__", "temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer =", "'')).encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong token, let's", "action='store_true', help=\"set a send only mode\") args = parser.parse_args() tasks = [] if", "reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer) token = answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file, 'w')", "port=port) temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token: writer.write('\\n'.encode()) await register(reader, writer,", "json.loads(answer) token = answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file, 'w') as _file: await _file.write(token)", "True: try: if not reader or not writer: reader, writer = await asyncio.open_connection(host=host,", "parser.add_argument('--rport', default=5000, type=int, help='Specify port to receive msg') parser.add_argument('--sport', default=5050, type=int, help='Specify port", "default=5000, type=int, help='Specify port to receive msg') parser.add_argument('--sport', default=5050, type=int, help='Specify port to", "= 0 reader, writer = None, None async with AIOFile(\"text.txt\", 'a') as _file:", "await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s 
seconds', 2 **", "raise async def register(reader, writer, args): if not args.user: logging.error(\"It's obligated to specidy", "answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong token, let's get", "task in tasks: await task if __name__ == \"__main__\": try: asyncio.run(main()) except KeyboardInterrupt:", "try: async with AIOFile(args.token_file, 'r') as _file: token = await _file.read() except FileNotFoundError:", "type=int, help='Specify port to receive msg') parser.add_argument('--sport', default=5050, type=int, help='Specify port to send", "sys import json from aiofile import AIOFile async def read_from_socket(host, port): timer =", "secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport', default=5000, type=int, help='Specify port to", "logging.debug(temp.decode(\"utf-8\")) if not token: writer.write('\\n'.encode()) await register(reader, writer, args) else: await authorise(reader, writer,", "while True: try: if not reader or not writer: reader, writer = await", "_file.write(token) async def authorise(reader, writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await reader.readline()", "token\") parser.add_argument('--text', help=\"set a text to send\") parser.add_argument('--send_only', action='store_true', help=\"set a send only", "argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport', default=5000, type=int, help='Specify", "await asyncio.sleep(2 ** timer) timer += 1 except asyncio.CancelledError: writer.close() raise async def", "None while True: try: reader, writer = await asyncio.open_connection(host=host, port=port) temp = await", "return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** 
timer) await asyncio.sleep(2 **", "parser.add_argument('--text', help=\"set a text to send\") parser.add_argument('--send_only', action='store_true', help=\"set a send only mode\")", "not have the correct token file\") logging.error('exiting') sys.exit() temp = await reader.readline() logging.debug(temp.decode(\"utf-8\"))", "help=\"set a username, it's oblicated for first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file", "args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 'null\\n':", "port): timer = 0 reader, writer = None, None async with AIOFile(\"text.txt\", 'a')", "one\") await register(reader, writer, args) async def main(): logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to", "if answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong token, let's get another one\") await register(reader, writer,", "connect') parser.add_argument('--rport', default=5000, type=int, help='Specify port to receive msg') parser.add_argument('--sport', default=5050, type=int, help='Specify", "await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer) token = answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file,", "writer, args, token): writer.write('{}\\n'.format(token.replace('\\n', '')).encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") ==", "as _file: token = await _file.read() except FileNotFoundError: token = None while True:", "def submit_message(host, port, args): timer = 0 try: async with AIOFile(args.token_file, 'r') as", "= await reader.readline() logging.debug(answer.decode(\"utf-8\")) if answer.decode(\"utf-8\") == 'null\\n': logging.warning(\"Wrong token, let's get another", "print(text.decode(\"utf-8\")) except 
(ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2 **", "args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task in tasks: await task if __name__ ==", "async with AIOFile(\"text.txt\", 'a') as _file: while True: try: if not reader or", "chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport', default=5000, type=int, help='Specify port to receive", "timer += 1 except asyncio.CancelledError: writer.close() raise async def submit_message(host, port, args): timer", "await register(reader, writer, args) else: await authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode())", "with token\") parser.add_argument('--text', help=\"set a text to send\") parser.add_argument('--send_only', action='store_true', help=\"set a send", "a file with token\") parser.add_argument('--text', help=\"set a text to send\") parser.add_argument('--send_only', action='store_true', help=\"set", "logging.warning(\"Wrong token, let's get another one\") await register(reader, writer, args) async def main():", "import AIOFile async def read_from_socket(host, port): timer = 0 reader, writer = None,", "has been successfully sent') return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 **", "been successfully sent') return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer)", "1 except asyncio.CancelledError: writer.close() raise async def register(reader, writer, args): if not args.user:", "logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host', default='minechat.dvmn.org', help='Host to connect') parser.add_argument('--rport',", "async def submit_message(host, port, args): timer = 0 try: async with 
AIOFile(args.token_file, 'r')", "except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2 ** timer)", "_file.read() except FileNotFoundError: token = None while True: try: reader, writer = await", "obligated to specidy login if you do not have the correct token file\")", "answer_dict['account_hash'] logging.debug(token) async with AIOFile(args.token_file, 'w') as _file: await _file.write(token) async def authorise(reader,", "text = await reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\"))", "await reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}] {}'.format(time_now, text.decode(\"utf-8\"))) print(text.decode(\"utf-8\")) except (ConnectionRefusedError,", "reader, writer = None, None timer += 1 except asyncio.CancelledError: writer.close() raise async", "writer = None, None timer += 1 except asyncio.CancelledError: writer.close() raise async def", "not args.user: logging.error(\"It's obligated to specidy login if you do not have the", "= await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = json.loads(answer) token = answer_dict['account_hash'] logging.debug(token) async with", "oblicated for first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file with token\") parser.add_argument('--text', help=\"set", "logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2 ** timer) reader, writer =", "= await _file.read() except FileNotFoundError: token = None while True: try: reader, writer", "let's get another one\") await register(reader, writer, args) async def main(): logging.basicConfig(level=logging.INFO) parser", "first run\") parser.add_argument('--token_file', default=\"token.txt\", help=\"set a file with token\") parser.add_argument('--text', 
help=\"set a text", "= [] if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for", "send msg') parser.add_argument('--user', help=\"set a username, it's oblicated for first run\") parser.add_argument('--token_file', default=\"token.txt\",", "None, None async with AIOFile(\"text.txt\", 'a') as _file: while True: try: if not", "reader, writer = await asyncio.open_connection(host=host, port=port) temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not", "login if you do not have the correct token file\") logging.error('exiting') sys.exit() temp", "ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2 ** timer) timer +=", "args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task in tasks: await", "import argparse import asyncio import logging import datetime import sys import json from", "args = parser.parse_args() tasks = [] if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text:", "with AIOFile(args.token_file, 'r') as _file: token = await _file.read() except FileNotFoundError: token =", "await reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer = await reader.readline()", "FileNotFoundError: token = None while True: try: reader, writer = await asyncio.open_connection(host=host, port=port)", "writer, args) async def main(): logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host',", "' ')) writer.write(user.encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict = 
json.loads(answer) token =", "help='Specify port to receive msg') parser.add_argument('--sport', default=5050, type=int, help='Specify port to send msg')", "parser.add_argument('--send_only', action='store_true', help=\"set a send only mode\") args = parser.parse_args() tasks = []", "parser.parse_args() tasks = [] if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport,", "await asyncio.open_connection(host=host, port=port) temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) if not token: writer.write('\\n'.encode()) await", "timer) reader, writer = None, None timer += 1 except asyncio.CancelledError: writer.close() raise", "(ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2 ** timer) timer", "not reader or not writer: reader, writer = await asyncio.open_connection(host=host, port=port) text =", "from aiofile import AIOFile async def read_from_socket(host, port): timer = 0 reader, writer", "token, let's get another one\") await register(reader, writer, args) async def main(): logging.basicConfig(level=logging.INFO)", "only mode\") args = parser.parse_args() tasks = [] if not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport)))", "reader, writer = None, None async with AIOFile(\"text.txt\", 'a') as _file: while True:", "timer = 0 try: async with AIOFile(args.token_file, 'r') as _file: token = await", "not args.send_only: tasks.append(asyncio.create_task(read_from_socket(args.host, args.rport))) if args.text: tasks.append(asyncio.create_task(submit_message(args.host, args.sport, args))) for task in tasks:", "logging.error('exiting') sys.exit() temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) user = '{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode())", "args) async def main(): 
logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser(description='connect to secret chat') parser.add_argument('--host', default='minechat.dvmn.org',", "the correct token file\") logging.error('exiting') sys.exit() temp = await reader.readline() logging.debug(temp.decode(\"utf-8\")) user =", "specidy login if you do not have the correct token file\") logging.error('exiting') sys.exit()", "args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has been successfully sent') return except (ConnectionRefusedError,", "AIOFile(args.token_file, 'r') as _file: token = await _file.read() except FileNotFoundError: token = None", "ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2 ** timer) reader, writer", "sent') return except (ConnectionRefusedError, ConnectionResetError): logging.warning('sleep %s seconds', 2 ** timer) await asyncio.sleep(2", "else: await authorise(reader, writer, args, token) writer.write('{}\\n\\n'.format(args.text.replace('\\n', ' ')).encode()) logging.info('text has been successfully", "await asyncio.open_connection(host=host, port=port) text = await reader.readline() time_now = datetime.datetime.now().strftime(\"%y.%m.%d %H.%M\") await _file.write('[{}]", "user = '{}\\n'.format(args.user.replace('\\n', ' ')) writer.write(user.encode()) answer = await reader.readline() logging.debug(answer.decode(\"utf-8\")) answer_dict =" ]
[ "name(self) -> str: \"\"\" The name you assign to the group during creation.", "pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import", "specific Group resource in Oracle Cloud Infrastructure Identity service. Gets the specified group's", "not edit by hand unless you're certain you know what you are doing!", "__ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags, description=__ret__.description, freeform_tags=__ret__.freeform_tags, group_id=__ret__.group_id,", "None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult: \"\"\" This data source provides details", "group's current state. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str:", "be a str\") pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\"", "time the group was created, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`", "of values returned by getGroup. 
\"\"\" def __init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None,", "@property @pulumi.getter def name(self) -> str: \"\"\" The name you assign to the", "opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup',", "a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if description and not isinstance(description, str): raise TypeError(\"Expected", "to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if description and not isinstance(description, str):", "raise TypeError(\"Expected argument 'inactive_state' to be a str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if name", "list of all the users in the group. To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships)", "that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's OCID as a query parameter in", "be unique, and it's changeable. 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self)", "TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id and", "argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self)", "defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None, state=None, time_created=None): if compartment_id and not", "'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id and not isinstance(group_id,", "return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def", "the group during creation. The name must be unique across all groups in", "the specified group's information. This operation does not return a list of all", "OCID of the group. \"\"\" __args__ = dict() __args__['groupId'] = group_id if opts", "group_id: The OCID of the group. \"\"\" __args__ = dict() __args__['groupId'] = group_id", "*** # *** Do not edit by hand unless you're certain you know", "to the group during creation. The name must be unique across all groups", "`{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter def description(self) -> str: \"\"\"", "resource. Each key is predefined and scoped to a namespace. For more information,", "state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult:", "Mapping, Optional, Sequence, Union, overload from .. 
import _utilities __all__ = [ 'GetGroupResult',", "str): raise TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if", "[Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter def description(self)", "to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\"", "None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ =", "predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\":", "Each tag is a simple key-value pair with no predefined name, type, or", "be a str\") pulumi.set(__self__, \"description\", description) if freeform_tags and not isinstance(freeform_tags, dict): raise", "return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str: \"\"\" The detailed status", "\"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter def description(self) -> str: \"\"\" The description", "For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\")", "be a str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if name and not isinstance(name, str): raise", "format defined by RFC3339. 
Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): #", "if id and not isinstance(id, str): raise TypeError(\"Expected argument 'id' to be a", "str: return pulumi.get(self, \"group_id\") @property @pulumi.getter def id(self) -> str: \"\"\" The OCID", "'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class GetGroupResult: \"\"\" A collection of values returned by", "Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self) -> str:", "dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if description and not isinstance(description, str): raise TypeError(\"Expected argument", "return pulumi.get(self, \"inactive_state\") @property @pulumi.getter def name(self) -> str: \"\"\" The name you", "compartment_id) if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be", "pulumi.get(self, \"name\") @property @pulumi.getter def state(self) -> str: \"\"\" The group's current state.", "request. ## Example Usage ```python import pulumi import pulumi_oci as oci test_group =", "'description' to be a str\") pulumi.set(__self__, \"description\", description) if freeform_tags and not isinstance(freeform_tags,", "return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self) -> str: return pulumi.get(self, \"group_id\") @property", "namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self,", "= pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags, description=__ret__.description, freeform_tags=__ret__.freeform_tags, group_id=__ret__.group_id, id=__ret__.id,", "_utilities __all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class GetGroupResult: \"\"\" A", "isinstance(group_id, str): raise TypeError(\"Expected argument 'group_id' to be a str\") pulumi.set(__self__, \"group_id\", group_id)", "Infrastructure Identity service. Gets the specified group's information. This operation does not return", "-> Mapping[str, Any]: \"\"\" Free-form tags for this resource. Each tag is a", "time_created(self) -> str: \"\"\" Date and time the group was created, in the", "dict() __args__['groupId'] = group_id if opts is None: opts = pulumi.InvokeOptions() if opts.version", "argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id and not", "@property @pulumi.getter(name=\"groupId\") def group_id(self) -> str: return pulumi.get(self, \"group_id\") @property @pulumi.getter def id(self)", "str): raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\", name) if", "@property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The OCID of the tenancy containing", "The OCID of the tenancy containing the group. \"\"\" return pulumi.get(self, \"compartment_id\") @property", "group. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str: \"\"\" The", "(tfgen) Tool. 
*** # *** Do not edit by hand unless you're certain", "argument 'inactive_state' to be a str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if name and not", "for this resource. Each key is predefined and scoped to a namespace. For", "opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags, description=__ret__.description, freeform_tags=__ret__.freeform_tags, group_id=__ret__.group_id, id=__ret__.id, inactive_state=__ret__.inactive_state, name=__ret__.name, state=__ret__.state,", "def state(self) -> str: \"\"\" The group's current state. \"\"\" return pulumi.get(self, \"state\")", "AwaitableGetGroupResult: \"\"\" This data source provides details about a specific Group resource in", "not isinstance(group_id, str): raise TypeError(\"Expected argument 'group_id' to be a str\") pulumi.set(__self__, \"group_id\",", "disable=using-constant-test def __await__(self): if False: yield self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags,", "# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen)", "TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags and", "compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None, state=None, time_created=None): if compartment_id and", "the group. Does not have to be unique, and it's changeable. \"\"\" return", "tags for this resource. 
Each tag is a simple key-value pair with no", "isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\", name)", "'get_group', ] @pulumi.output_type class GetGroupResult: \"\"\" A collection of values returned by getGroup.", "str): raise TypeError(\"Expected argument 'inactive_state' to be a str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if", "] @pulumi.output_type class GetGroupResult: \"\"\" A collection of values returned by getGroup. \"\"\"", "str\") pulumi.set(__self__, \"group_id\", group_id) if id and not isinstance(id, str): raise TypeError(\"Expected argument", "@property @pulumi.getter def id(self) -> str: \"\"\" The OCID of the group. \"\"\"", "'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if description and not isinstance(description,", "dict): raise TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if", "'group_id' to be a str\") pulumi.set(__self__, \"group_id\", group_id) if id and not isinstance(id,", "not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\",", "def group_id(self) -> str: return pulumi.get(self, \"group_id\") @property @pulumi.getter def id(self) -> str:", "group_id(self) -> str: return pulumi.get(self, \"group_id\") @property @pulumi.getter def id(self) -> str: \"\"\"", "be a str\") pulumi.set(__self__, \"id\", id) if inactive_state and not isinstance(inactive_state, str): raise", "pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for", "Any]: \"\"\" Defined tags for this resource. 
Each key is predefined and scoped", "Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult: \"\"\" This data source", "\"state\", state) if time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to", "return a list of all the users in the group. To do that,", "argument 'group_id' to be a str\") pulumi.set(__self__, \"group_id\", group_id) if id and not", "pair with no predefined name, type, or namespace. For more information, see [Resource", "opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version()", "isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id)", "the tenancy and cannot be changed. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def", "unique across all groups in the tenancy and cannot be changed. \"\"\" return", "by getGroup. \"\"\" def __init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None,", "__all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class GetGroupResult: \"\"\" A collection", "freeform_tags) if group_id and not isinstance(group_id, str): raise TypeError(\"Expected argument 'group_id' to be", "raise TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__, \"id\", id) if inactive_state", "freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be a dict\")", "@property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str: \"\"\" The detailed status of INACTIVE lifecycleState.", "it's changeable. 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]:", "@pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" Date and time the group was created,", "and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__,", "Date and time the group was created, in the format defined by RFC3339.", "= oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id: The OCID of the group. \"\"\" __args__", "name=None, state=None, time_created=None): if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id'", "Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless", "Oracle Cloud Infrastructure Identity service. Gets the specified group's information. This operation does", "type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\"", "group during creation. The name must be unique across all groups in the", "@pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The OCID of the tenancy containing the", "you assign to the group. Does not have to be unique, and it's", "tags for this resource. Each key is predefined and scoped to a namespace.", "@pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str: \"\"\" The detailed status of INACTIVE lifecycleState. \"\"\"", "the group was created, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\"", "pulumi.get(self, \"inactive_state\") @property @pulumi.getter def name(self) -> str: \"\"\" The name you assign", "The OCID of the group. \"\"\" __args__ = dict() __args__['groupId'] = group_id if", "The description you assign to the group. Does not have to be unique,", "changeable. 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\"", "pulumi.get(self, \"defined_tags\") @property @pulumi.getter def description(self) -> str: \"\"\" The description you assign", "in the request. ## Example Usage ```python import pulumi import pulumi_oci as oci", "typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__", "a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected", "and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__,", "inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None)", "from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities", "\"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def state(self) -> str: \"\"\" The group's", "class GetGroupResult: \"\"\" A collection of values returned by getGroup. \"\"\" def __init__(__self__,", "Optional, Sequence, Union, overload from .. import _utilities __all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult',", "see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter def", "you're certain you know what you are doing! *** import warnings import pulumi", "you assign to the group during creation. The name must be unique across", "unless you're certain you know what you are doing! 
*** import warnings import", "if description and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a", "be a str\") pulumi.set(__self__, \"group_id\", group_id) if id and not isinstance(id, str): raise", "containing the group. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str,", "assign to the group. Does not have to be unique, and it's changeable.", "and it's changeable. \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str,", "tenancy and cannot be changed. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def state(self)", "and not isinstance(id, str): raise TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__,", "'id' to be a str\") pulumi.set(__self__, \"id\", id) if inactive_state and not isinstance(inactive_state,", "state. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" Date", "not isinstance(state, str): raise TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__, \"state\",", "[ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class GetGroupResult: \"\"\" A collection of values", "across all groups in the tenancy and cannot be changed. \"\"\" return pulumi.get(self,", "a str\") pulumi.set(__self__, \"id\", id) if inactive_state and not isinstance(inactive_state, str): raise TypeError(\"Expected", "the Pulumi Terraform Bridge (tfgen) Tool. 
*** # *** Do not edit by", "be a str\") pulumi.set(__self__, \"name\", name) if state and not isinstance(state, str): raise", "opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags,", "OCID as a query parameter in the request. ## Example Usage ```python import", "= [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class GetGroupResult: \"\"\" A collection of", "state(self) -> str: \"\"\" The group's current state. \"\"\" return pulumi.get(self, \"state\") @property", "isinstance(inactive_state, str): raise TypeError(\"Expected argument 'inactive_state' to be a str\") pulumi.set(__self__, \"inactive_state\", inactive_state)", "and not isinstance(group_id, str): raise TypeError(\"Expected argument 'group_id' to be a str\") pulumi.set(__self__,", "\"defined_tags\", defined_tags) if description and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to", "for this resource. Each tag is a simple key-value pair with no predefined", "not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\",", "to be a str\") pulumi.set(__self__, \"id\", id) if inactive_state and not isinstance(inactive_state, str):", "is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).", "OCID of the group. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) ->", "as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id: The OCID of the", "str: \"\"\" The description you assign to the group. 
Does not have to", "and provide the group's OCID as a query parameter in the request. ##", "group. To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's OCID as a", "time_created=self.time_created) def get_group(group_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult: \"\"\"", "__await__(self): if False: yield self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id,", "\"\"\" def __init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None, state=None, time_created=None):", "more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property", "by hand unless you're certain you know what you are doing! *** import", "TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\", name) if state and", "'name' to be a str\") pulumi.set(__self__, \"name\", name) if state and not isinstance(state,", "RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def", "*** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional,", "to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags and not isinstance(defined_tags, dict):", "the users in the group. To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the", "the group. 
\"\"\" __args__ = dict() __args__['groupId'] = group_id if opts is None:", "you are doing! *** import warnings import pulumi import pulumi.runtime from typing import", "group_id if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version", "raise TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags", "Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're", "service. Gets the specified group's information. This operation does not return a list", "@pulumi.getter def id(self) -> str: \"\"\" The OCID of the group. \"\"\" return", "\"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter def description(self) -> str: \"\"\" The", "TypeError(\"Expected argument 'group_id' to be a str\") pulumi.set(__self__, \"group_id\", group_id) if id and", "inactive_state(self) -> str: \"\"\" The detailed status of INACTIVE lifecycleState. \"\"\" return pulumi.get(self,", "to be a str\") pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str:", "and not isinstance(inactive_state, str): raise TypeError(\"Expected argument 'inactive_state' to be a str\") pulumi.set(__self__,", "```python import pulumi import pulumi_oci as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str", "from .. import _utilities __all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class", "def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for this resource. Each tag", "this resource. Each key is predefined and scoped to a namespace. For more", "group. \"\"\" __args__ = dict() __args__['groupId'] = group_id if opts is None: opts", "[Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self)", "a str\") pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The", "# *** Do not edit by hand unless you're certain you know what", "a str\") pulumi.set(__self__, \"description\", description) if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected", "str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if name and not isinstance(name, str): raise TypeError(\"Expected argument", "freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None, state=None, time_created=None): if compartment_id and not isinstance(compartment_id, str):", "coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge", "inactive_state) if name and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be", "def description(self) -> str: \"\"\" The description you assign to the group. Does", "\"\"\" __args__ = dict() __args__['groupId'] = group_id if opts is None: opts =", "defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be a dict\")", "to the group. Does not have to be unique, and it's changeable. 
\"\"\"", "*** Do not edit by hand unless you're certain you know what you", "\"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for this", "inactive_state=None, name=None, state=None, time_created=None): if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument", "\"compartment_id\", compartment_id) if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to", "defined_tags) if description and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be", "freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for this resource. Each tag is", "return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" Date and time", "all groups in the tenancy and cannot be changed. \"\"\" return pulumi.get(self, \"name\")", "str: \"\"\" The OCID of the tenancy containing the group. \"\"\" return pulumi.get(self,", "and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__,", "no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example:", "This operation does not return a list of all the users in the", "str: \"\"\" The detailed status of INACTIVE lifecycleState. \"\"\" return pulumi.get(self, \"inactive_state\") @property", "\"freeform_tags\", freeform_tags) if group_id and not isinstance(group_id, str): raise TypeError(\"Expected argument 'group_id' to", "detailed status of INACTIVE lifecycleState. \"\"\" return pulumi.get(self, \"inactive_state\") @property @pulumi.getter def name(self)", "INACTIVE lifecycleState. 
\"\"\" return pulumi.get(self, \"inactive_state\") @property @pulumi.getter def name(self) -> str: \"\"\"", "\"\"\" Free-form tags for this resource. Each tag is a simple key-value pair", "__args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags, description=__ret__.description, freeform_tags=__ret__.freeform_tags, group_id=__ret__.group_id, id=__ret__.id, inactive_state=__ret__.inactive_state, name=__ret__.name,", "`2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def __await__(self): if", "str group_id: The OCID of the group. \"\"\" __args__ = dict() __args__['groupId'] =", "-> str: \"\"\" The group's current state. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\")", "-> str: \"\"\" The detailed status of INACTIVE lifecycleState. \"\"\" return pulumi.get(self, \"inactive_state\")", "id) if inactive_state and not isinstance(inactive_state, str): raise TypeError(\"Expected argument 'inactive_state' to be", "raise TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if description", "not return a list of all the users in the group. To do", "@pulumi.getter def state(self) -> str: \"\"\" The group's current state. 
\"\"\" return pulumi.get(self,", "\"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str: \"\"\" The detailed", "isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\", description)", "import pulumi import pulumi_oci as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id:", "operation does not return a list of all the users in the group.", "-> str: \"\"\" The description you assign to the group. Does not have", "= None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult: \"\"\" This data source provides", "state=None, time_created=None): if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to", "Each key is predefined and scoped to a namespace. For more information, see", "details about a specific Group resource in Oracle Cloud Infrastructure Identity service. Gets", "TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__, \"id\", id) if inactive_state and", ":param str group_id: The OCID of the group. \"\"\" __args__ = dict() __args__['groupId']", "Tool. *** # *** Do not edit by hand unless you're certain you", "is a simple key-value pair with no predefined name, type, or namespace. For", "pulumi.set(__self__, \"group_id\", group_id) if id and not isinstance(id, str): raise TypeError(\"Expected argument 'id'", "group was created, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\" return", "a simple key-value pair with no predefined name, type, or namespace. For more", "a list of all the users in the group. 
To do that, use", "id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] =", "be a str\") pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created, str): raise", "id=None, inactive_state=None, name=None, state=None, time_created=None): if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected", "argument 'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags and not", "-> str: \"\"\" Date and time the group was created, in the format", "= dict() __args__['groupId'] = group_id if opts is None: opts = pulumi.InvokeOptions() if", "None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id,", "edit by hand unless you're certain you know what you are doing! ***", "'compartment_id' to be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags and not isinstance(defined_tags,", "and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__,", "not have to be unique, and it's changeable. \"\"\" return pulumi.get(self, \"description\") @property", "\"\"\" The description you assign to the group. 
Does not have to be", "def time_created(self) -> str: \"\"\" Date and time the group was created, in", "inactive_state and not isinstance(inactive_state, str): raise TypeError(\"Expected argument 'inactive_state' to be a str\")", "must be unique across all groups in the tenancy and cannot be changed.", "argument 'state' to be a str\") pulumi.set(__self__, \"state\", state) if time_created and not", "description) if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be", "be a str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags and not isinstance(defined_tags, dict): raise", "argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if description and not", "to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id and not isinstance(group_id, str):", "\"\"\" The OCID of the tenancy containing the group. \"\"\" return pulumi.get(self, \"compartment_id\")", "tag is a simple key-value pair with no predefined name, type, or namespace.", "all the users in the group. 
To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide", "raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\", description) if freeform_tags", "defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str] =", "raise TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\")", "@pulumi.getter(name=\"groupId\") def group_id(self) -> str: return pulumi.get(self, \"group_id\") @property @pulumi.getter def id(self) ->", "def name(self) -> str: \"\"\" The name you assign to the group during", "information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter", "isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created)", "This data source provides details about a specific Group resource in Oracle Cloud", "and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__,", "pulumi import pulumi_oci as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id: The", "\"\"\" The name you assign to the group during creation. The name must", "group's information. 
This operation does not return a list of all the users", "opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return", "doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping,", "simple key-value pair with no predefined name, type, or namespace. For more information,", "of INACTIVE lifecycleState. \"\"\" return pulumi.get(self, \"inactive_state\") @property @pulumi.getter def name(self) -> str:", "\"id\", id) if inactive_state and not isinstance(inactive_state, str): raise TypeError(\"Expected argument 'inactive_state' to", "The OCID of the group. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self)", "\"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for this", "and cannot be changed. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def state(self) ->", "pulumi.set(__self__, \"id\", id) if inactive_state and not isinstance(inactive_state, str): raise TypeError(\"Expected argument 'inactive_state'", "@pulumi.getter def name(self) -> str: \"\"\" The name you assign to the group", "current state. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\"", "\"inactive_state\") @property @pulumi.getter def name(self) -> str: \"\"\" The name you assign to", "more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\") @property", "\"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str: \"\"\" The detailed status of INACTIVE", "be changed. 
\"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def state(self) -> str: \"\"\"", "\"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined", "if time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be a", "GetGroupResult: \"\"\" A collection of values returned by getGroup. \"\"\" def __init__(__self__, compartment_id=None,", "resource. Each tag is a simple key-value pair with no predefined name, type,", "use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's OCID as a query parameter in the", "this resource. Each tag is a simple key-value pair with no predefined name,", "## Example Usage ```python import pulumi import pulumi_oci as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"])", "name and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a str\")", "isinstance(state, str): raise TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__, \"state\", state)", "False: yield self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name,", "\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form", "self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created)", "pulumi.set(__self__, \"name\", 
name) if state and not isinstance(state, str): raise TypeError(\"Expected argument 'state'", "be unique across all groups in the tenancy and cannot be changed. \"\"\"", "returned by getGroup. \"\"\" def __init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None,", "= None) -> AwaitableGetGroupResult: \"\"\" This data source provides details about a specific", "if False: yield self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state,", "scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}`", "a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return", "pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags, description=__ret__.description, freeform_tags=__ret__.freeform_tags, group_id=__ret__.group_id, id=__ret__.id, inactive_state=__ret__.inactive_state,", "not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\",", "groups in the tenancy and cannot be changed. \"\"\" return pulumi.get(self, \"name\") @property", "pulumi.set(__self__, \"defined_tags\", defined_tags) if description and not isinstance(description, str): raise TypeError(\"Expected argument 'description'", "Mapping[str, Any]: \"\"\" Defined tags for this resource. 
Each key is predefined and", "str\") pulumi.set(__self__, \"id\", id) if inactive_state and not isinstance(inactive_state, str): raise TypeError(\"Expected argument", "users in the group. To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's", "if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version =", "and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be a str\") pulumi.set(__self__,", "*** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool.", "def compartment_id(self) -> str: \"\"\" The OCID of the tenancy containing the group.", "the group's OCID as a query parameter in the request. ## Example Usage", "@property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for this resource.", "Sequence, Union, overload from .. import _utilities __all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group',", "\"\"\" The OCID of the group. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def", "import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence,", "@pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for this resource. Each", "lifecycleState. \"\"\" return pulumi.get(self, \"inactive_state\") @property @pulumi.getter def name(self) -> str: \"\"\" The", "changed. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def state(self) -> str: \"\"\" The", "\"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self) -> str: return pulumi.get(self, \"group_id\") @property @pulumi.getter def", "Group resource in Oracle Cloud Infrastructure Identity service. 
Gets the specified group's information.", "compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be a str\")", "by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit", "creation. The name must be unique across all groups in the tenancy and", "The group's current state. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) ->", "a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id and not isinstance(group_id, str): raise TypeError(\"Expected", "\"\"\" return pulumi.get(self, \"inactive_state\") @property @pulumi.getter def name(self) -> str: \"\"\" The name", "pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created'", "str: \"\"\" The name you assign to the group during creation. The name", "resource in Oracle Cloud Infrastructure Identity service. Gets the specified group's information. This", "warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union,", "the group. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]:", "\"group_id\", group_id) if id and not isinstance(id, str): raise TypeError(\"Expected argument 'id' to", "Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand", "\"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The OCID of the", "name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
Example: `{\"Department\": \"Finance\"}`", "pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self", "\"\"\" The group's current state. \"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self)", "= pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__,", "dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id and not isinstance(group_id, str): raise TypeError(\"Expected argument", "argument 'id' to be a str\") pulumi.set(__self__, \"id\", id) if inactive_state and not", "-> str: \"\"\" The OCID of the tenancy containing the group. \"\"\" return", "description and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a str\")", "\"\"\" return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def __await__(self): if False:", "by RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test", "\"name\") @property @pulumi.getter def state(self) -> str: \"\"\" The group's current state. \"\"\"", "values returned by getGroup. \"\"\" def __init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None,", "# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform", "The detailed status of INACTIVE lifecycleState. \"\"\" return pulumi.get(self, \"inactive_state\") @property @pulumi.getter def", "str: \"\"\" The OCID of the group. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\")", "key is predefined and scoped to a namespace. 
For more information, see [Resource", "source provides details about a specific Group resource in Oracle Cloud Infrastructure Identity", "if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value", "do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's OCID as a query parameter", "raise TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id", "certain you know what you are doing! *** import warnings import pulumi import", "def get_group(group_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult: \"\"\" This", "in the tenancy and cannot be changed. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter", "a specific Group resource in Oracle Cloud Infrastructure Identity service. Gets the specified", "opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult: \"\"\" This data source provides details about", "return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def __await__(self): if False: yield", "and not isinstance(state, str): raise TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__,", "``` :param str group_id: The OCID of the group. \"\"\" __args__ = dict()", "import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ..", "key-value pair with no predefined name, type, or namespace. 
For more information, see", "\"defined_tags\") @property @pulumi.getter def description(self) -> str: \"\"\" The description you assign to", "pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str: \"\"\" The detailed status of", "return pulumi.get(self, \"defined_tags\") @property @pulumi.getter def description(self) -> str: \"\"\" The description you", "pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags'", "test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id: The OCID of the group. \"\"\"", "the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult):", "= _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags, description=__ret__.description,", "collection of values returned by getGroup. \"\"\" def __init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None,", "typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags, description=__ret__.description, freeform_tags=__ret__.freeform_tags, group_id=__ret__.group_id, id=__ret__.id, inactive_state=__ret__.inactive_state, name=__ret__.name, state=__ret__.state, time_created=__ret__.time_created)", "compartment_id(self) -> str: \"\"\" The OCID of the tenancy containing the group. \"\"\"", "status of INACTIVE lifecycleState. \"\"\" return pulumi.get(self, \"inactive_state\") @property @pulumi.getter def name(self) ->", "in Oracle Cloud Infrastructure Identity service. Gets the specified group's information. 
This operation", "\"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self) -> str: return pulumi.get(self, \"group_id\")", "a str\") pulumi.set(__self__, \"group_id\", group_id) if id and not isinstance(id, str): raise TypeError(\"Expected", "name) if state and not isinstance(state, str): raise TypeError(\"Expected argument 'state' to be", "Mapping[str, Any]: \"\"\" Free-form tags for this resource. Each tag is a simple", "\"\"\" The detailed status of INACTIVE lifecycleState. \"\"\" return pulumi.get(self, \"inactive_state\") @property @pulumi.getter", "pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from", "time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The OCID of the tenancy", "-> AwaitableGetGroupResult: \"\"\" This data source provides details about a specific Group resource", "does not return a list of all the users in the group. To", "oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id: The OCID of the group. \"\"\" __args__ =", "@pulumi.getter def description(self) -> str: \"\"\" The description you assign to the group.", "the request. ## Example Usage ```python import pulumi import pulumi_oci as oci test_group", "TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__, \"state\", state) if time_created and", "The name must be unique across all groups in the tenancy and cannot", "and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\":", "@property @pulumi.getter def description(self) -> str: \"\"\" The description you assign to the", "not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\",", "parameter in the request. 
## Example Usage ```python import pulumi import pulumi_oci as", "if inactive_state and not isinstance(inactive_state, str): raise TypeError(\"Expected argument 'inactive_state' to be a", "if state and not isinstance(state, str): raise TypeError(\"Expected argument 'state' to be a", "group_id) if id and not isinstance(id, str): raise TypeError(\"Expected argument 'id' to be", "to be unique, and it's changeable. \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def", "of the group. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str:", "query parameter in the request. ## Example Usage ```python import pulumi import pulumi_oci", "Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter def description(self) ->", "provides details about a specific Group resource in Oracle Cloud Infrastructure Identity service.", "dict): raise TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if", "\"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" Date and time the group", "= group_id if opts is None: opts = pulumi.InvokeOptions() if opts.version is None:", "str): raise TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__, \"id\", id) if", "provide the group's OCID as a query parameter in the request. ## Example", "know what you are doing! *** import warnings import pulumi import pulumi.runtime from", "str: \"\"\" The group's current state. 
\"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def", "to be a str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if name and not isinstance(name, str):", "\"\"\" return pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" Date and", "\"\"\" This data source provides details about a specific Group resource in Oracle", "import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ =", "import pulumi_oci as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id: The OCID", "Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\") @property @pulumi.getter def description(self) -> str:", "name you assign to the group during creation. The name must be unique", "def inactive_state(self) -> str: \"\"\" The detailed status of INACTIVE lifecycleState. \"\"\" return", "Usage ```python import pulumi import pulumi_oci as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param", "as a query parameter in the request. 
## Example Usage ```python import pulumi", "None) -> AwaitableGetGroupResult: \"\"\" This data source provides details about a specific Group", "group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions]", "if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be a", "AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetGroupResult( compartment_id=self.compartment_id,", "group_id=None, id=None, inactive_state=None, name=None, state=None, time_created=None): if compartment_id and not isinstance(compartment_id, str): raise", "group_id and not isinstance(group_id, str): raise TypeError(\"Expected argument 'group_id' to be a str\")", "TypeError(\"Expected argument 'inactive_state' to be a str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if name and", "OCID of the tenancy containing the group. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\")", "def id(self) -> str: \"\"\" The OCID of the group. \"\"\" return pulumi.get(self,", "id(self) -> str: \"\"\" The OCID of the group. \"\"\" return pulumi.get(self, \"id\")", "str): raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\", description) if", ".. import _utilities __all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class GetGroupResult:", "namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self,", "pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self) -> str: return pulumi.get(self, \"group_id\") @property @pulumi.getter", "specified group's information. This operation does not return a list of all the", "of all the users in the group. To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and", "@pulumi.output_type class GetGroupResult: \"\"\" A collection of values returned by getGroup. \"\"\" def", "-> str: return pulumi.get(self, \"group_id\") @property @pulumi.getter def id(self) -> str: \"\"\" The", "\"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return", "state and not isinstance(state, str): raise TypeError(\"Expected argument 'state' to be a str\")", "the tenancy containing the group. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self)", "if group_id and not isinstance(group_id, str): raise TypeError(\"Expected argument 'group_id' to be a", "Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def __await__(self):", "the group. \"\"\" return pulumi.get(self, \"id\") @property @pulumi.getter(name=\"inactiveState\") def inactive_state(self) -> str: \"\"\"", "\"group_id\") @property @pulumi.getter def id(self) -> str: \"\"\" The OCID of the group.", "description you assign to the group. 
Does not have to be unique, and", "id and not isinstance(id, str): raise TypeError(\"Expected argument 'id' to be a str\")", "str): raise TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created) @property", "pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for", "_utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult( compartment_id=__ret__.compartment_id, defined_tags=__ret__.defined_tags, description=__ret__.description, freeform_tags=__ret__.freeform_tags,", "__args__ = dict() __args__['groupId'] = group_id if opts is None: opts = pulumi.InvokeOptions()", "Do not edit by hand unless you're certain you know what you are", "getGroup. \"\"\" def __init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None, state=None,", "defined by RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\") class AwaitableGetGroupResult(GetGroupResult): # pylint:", "pulumi.get(self, \"state\") @property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" Date and time the", "isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags)", "if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be a", "__init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None, state=None, time_created=None): if compartment_id", "Free-form tags for this resource. 
Each tag is a simple key-value pair with", "\"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self) -> str: return pulumi.get(self,", "not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\",", "are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any,", "generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not", "WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***", "-> str: \"\"\" The name you assign to the group during creation. The", "return pulumi.get(self, \"name\") @property @pulumi.getter def state(self) -> str: \"\"\" The group's current", "have to be unique, and it's changeable. \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\")", "pulumi_oci as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id: The OCID of", "a str\") pulumi.set(__self__, \"name\", name) if state and not isinstance(state, str): raise TypeError(\"Expected", "pylint: disable=using-constant-test def __await__(self): if False: yield self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description,", "data source provides details about a specific Group resource in Oracle Cloud Infrastructure", "is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__", "not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\",", "unique, and it's changeable. 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) ->", "Example Usage ```python import pulumi import pulumi_oci as oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ```", "Defined tags for this resource. Each key is predefined and scoped to a", "For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Operations.CostCenter\": \"42\"}` \"\"\" return pulumi.get(self, \"defined_tags\")", "str): raise TypeError(\"Expected argument 'group_id' to be a str\") pulumi.set(__self__, \"group_id\", group_id) if", "predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example:", "assign to the group during creation. The name must be unique across all", "\"description\", description) if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to", "argument 'name' to be a str\") pulumi.set(__self__, \"name\", name) if state and not", "information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\")", "import _utilities __all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class GetGroupResult: \"\"\"", "@property @pulumi.getter def state(self) -> str: \"\"\" The group's current state. 
\"\"\" return", "description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str] = None,", "time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be a str\")", "TypeError(\"Expected argument 'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def", "state) if time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument 'time_created' to be", "group's OCID as a query parameter in the request. ## Example Usage ```python", "def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for this resource. Each key", "of the group. \"\"\" __args__ = dict() __args__['groupId'] = group_id if opts is", "isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags' to be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags)", "pulumi.set(__self__, \"description\", description) if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument 'freeform_tags'", "get_group(group_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult: \"\"\" This data", "Any]: \"\"\" Free-form tags for this resource. Each tag is a simple key-value", "in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\") class", "during creation. The name must be unique across all groups in the tenancy", "'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type class GetGroupResult: \"\"\" A collection of values returned", "A collection of values returned by getGroup. 
\"\"\" def __init__(__self__, compartment_id=None, defined_tags=None, description=None,", "__args__['groupId'] = group_id if opts is None: opts = pulumi.InvokeOptions() if opts.version is", "\"\"\" A collection of values returned by getGroup. \"\"\" def __init__(__self__, compartment_id=None, defined_tags=None,", "pulumi.set(__self__, \"inactive_state\", inactive_state) if name and not isinstance(name, str): raise TypeError(\"Expected argument 'name'", "a str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if name and not isinstance(name, str): raise TypeError(\"Expected", "str\") pulumi.set(__self__, \"description\", description) if freeform_tags and not isinstance(freeform_tags, dict): raise TypeError(\"Expected argument", "argument 'description' to be a str\") pulumi.set(__self__, \"description\", description) if freeform_tags and not", "of the tenancy containing the group. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def", "time_created=None): if compartment_id and not isinstance(compartment_id, str): raise TypeError(\"Expected argument 'compartment_id' to be", "# pylint: disable=using-constant-test def __await__(self): if False: yield self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags,", "oci test_group = oci.identity.get_group(group_id=oci_identity_group[\"test_group\"][\"id\"]) ``` :param str group_id: The OCID of the group.", "def __init__(__self__, compartment_id=None, defined_tags=None, description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None, state=None, time_created=None): if", "and time the group was created, in the format defined by RFC3339. 
Example:", "raise TypeError(\"Expected argument 'group_id' to be a str\") pulumi.set(__self__, \"group_id\", group_id) if id", "GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id:", "pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts,", "hand unless you're certain you know what you are doing! *** import warnings", "Gets the specified group's information. This operation does not return a list of", "this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** #", "a str\") pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created, str): raise TypeError(\"Expected", "yield self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state,", "in the group. To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's OCID", "Union, overload from .. import _utilities __all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ]", "or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return", "\"name\", name) if state and not isinstance(state, str): raise TypeError(\"Expected argument 'state' to", "cannot be changed. 
\"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def state(self) -> str:", "description=None, freeform_tags=None, group_id=None, id=None, inactive_state=None, name=None, state=None, time_created=None): if compartment_id and not isinstance(compartment_id,", "-> Mapping[str, Any]: \"\"\" Defined tags for this resource. Each key is predefined", "group. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\"", "to be a str\") pulumi.set(__self__, \"description\", description) if freeform_tags and not isinstance(freeform_tags, dict):", "if name and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a", "str): raise TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__, \"state\", state) if", "with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).", "Cloud Infrastructure Identity service. Gets the specified group's information. This operation does not", "created, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self, \"time_created\")", "str\") pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created, str): raise TypeError(\"Expected argument", "@pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for this resource. Each", "description(self) -> str: \"\"\" The description you assign to the group. Does not", "`{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self) -> str: return", "return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags", "The name you assign to the group during creation. 
The name must be", "what you are doing! *** import warnings import pulumi import pulumi.runtime from typing", "was created, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z` \"\"\" return pulumi.get(self,", "group. Does not have to be unique, and it's changeable. \"\"\" return pulumi.get(self,", "pulumi.get(self, \"group_id\") @property @pulumi.getter def id(self) -> str: \"\"\" The OCID of the", "class AwaitableGetGroupResult(GetGroupResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetGroupResult(", "\"\"\" Date and time the group was created, in the format defined by", "you know what you are doing! *** import warnings import pulumi import pulumi.runtime", "a query parameter in the request. ## Example Usage ```python import pulumi import", "Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = [", "return pulumi.get(self, \"description\") @property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags", "to be a str\") pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created, str):", "TypeError(\"Expected argument 'defined_tags' to be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if description and", "[ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's OCID as a query parameter in the request.", "@property @pulumi.getter(name=\"timeCreated\") def time_created(self) -> str: \"\"\" Date and time the group was", "pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The OCID of", "TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\", description) if freeform_tags and", "name must be unique across all groups in the tenancy and cannot be", 
"\"\"\" Defined tags for this resource. Each key is predefined and scoped to", "defined_tags(self) -> Mapping[str, Any]: \"\"\" Defined tags for this resource. Each key is", "str\") pulumi.set(__self__, \"compartment_id\", compartment_id) if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument", "if defined_tags and not isinstance(defined_tags, dict): raise TypeError(\"Expected argument 'defined_tags' to be a", "'state' to be a str\") pulumi.set(__self__, \"state\", state) if time_created and not isinstance(time_created,", "raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\", name) if state", "-> str: \"\"\" The OCID of the group. \"\"\" return pulumi.get(self, \"id\") @property", "Identity service. Gets the specified group's information. This operation does not return a", "to be a str\") pulumi.set(__self__, \"group_id\", group_id) if id and not isinstance(id, str):", "pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id and not isinstance(group_id, str): raise TypeError(\"Expected argument 'group_id'", "str: \"\"\" Date and time the group was created, in the format defined", "def __await__(self): if False: yield self return GetGroupResult( compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id,", "'inactive_state' to be a str\") pulumi.set(__self__, \"inactive_state\", inactive_state) if name and not isinstance(name,", "be a dict\") pulumi.set(__self__, \"defined_tags\", defined_tags) if description and not isinstance(description, str): raise", "file was generated by the Pulumi Terraform Bridge (tfgen) Tool. 
*** # ***", "return pulumi.get(self, \"group_id\") @property @pulumi.getter def id(self) -> str: \"\"\" The OCID of", "To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's OCID as a query", "'time_created' to be a str\") pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) ->", "compartment_id=self.compartment_id, defined_tags=self.defined_tags, description=self.description, freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str]", "not isinstance(id, str): raise TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__, \"id\",", "be a dict\") pulumi.set(__self__, \"freeform_tags\", freeform_tags) if group_id and not isinstance(group_id, str): raise", "is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('oci:identity/getGroup:getGroup', __args__, opts=opts, typ=GetGroupResult).value return AwaitableGetGroupResult(", "str\") pulumi.set(__self__, \"name\", name) if state and not isinstance(state, str): raise TypeError(\"Expected argument", "about a specific Group resource in Oracle Cloud Infrastructure Identity service. Gets the", "isinstance(id, str): raise TypeError(\"Expected argument 'id' to be a str\") pulumi.set(__self__, \"id\", id)", "Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def group_id(self) ->", "was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do", "see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). 
Example: `{\"Department\": \"Finance\"}` \"\"\" return pulumi.get(self, \"freeform_tags\") @property @pulumi.getter(name=\"groupId\") def", "import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload", "tenancy containing the group. \"\"\" return pulumi.get(self, \"compartment_id\") @property @pulumi.getter(name=\"definedTags\") def defined_tags(self) ->", "name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) ->", "to be a str\") pulumi.set(__self__, \"name\", name) if state and not isinstance(state, str):", "information. This operation does not return a list of all the users in", "Does not have to be unique, and it's changeable. \"\"\" return pulumi.get(self, \"description\")", "@property @pulumi.getter(name=\"freeformTags\") def freeform_tags(self) -> Mapping[str, Any]: \"\"\" Free-form tags for this resource.", "freeform_tags=self.freeform_tags, group_id=self.group_id, id=self.id, inactive_state=self.inactive_state, name=self.name, state=self.state, time_created=self.time_created) def get_group(group_id: Optional[str] = None, opts:", "raise TypeError(\"Expected argument 'state' to be a str\") pulumi.set(__self__, \"state\", state) if time_created", "str\") pulumi.set(__self__, \"time_created\", time_created) @property @pulumi.getter(name=\"compartmentId\") def compartment_id(self) -> str: \"\"\" The OCID", "the group. To do that, use [ListUserGroupMemberships](https://docs.cloud.oracle.com/iaas/api/#/en/identity/20160918/UserGroupMembership/ListUserGroupMemberships) and provide the group's OCID as", "not isinstance(inactive_state, str): raise TypeError(\"Expected argument 'inactive_state' to be a str\") pulumi.set(__self__, \"inactive_state\",", "overload from .. 
import _utilities __all__ = [ 'GetGroupResult', 'AwaitableGetGroupResult', 'get_group', ] @pulumi.output_type", "\"inactive_state\", inactive_state) if name and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to", "Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGroupResult: \"\"\" This data source provides details about a" ]
[ "== 'POST': # divulgacao = Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria =", "DonationForm(instance=donation) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_voluntary(request, id): title = \"Editar", "# def formevent(request): # if request.method == 'POST': # divulgacao = Establishment() #", "\"POST\": form = DonationForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form':", "request.POST['horarioInicio'] # divulgacao.horarioFim = request.POST['horarioFim'] # divulgacao.data = request.POST['data'] # divulgacao.user = request.user", "return render(request, 'mapa.html', {'latitude':latitude, 'longitude': longitude}) # return render(request, 'index.html') def servicolist(request): estabelecimentos", "create_donation(request): title = \"Cadastrar Doação\" if request.method == \"POST\": form = DonationForm(request.POST) if", "def delete_health_service(request, id): health_service = get_object_or_404(HealthService, id=id) health_service.delete() return render(request, 'delete.html') @login_required def", "return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm(instance=health_service) return render(request,", "CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from .models import Establishment, HealthService, Donation, VoluntaryService", "def formevent(request): # if request.method == 'POST': # divulgacao = Establishment() # divulgacao.nomeEvento", "= smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return render(request, 'fale-conosco.html',", "# divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim = 
request.POST['horarioFim'] # divulgacao.data = request.POST['data'] #", "if request.method == 'POST': # divulgacao = Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento'] #", "return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm() return render(request,", "CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm @login_required def create_health_service(request): title = \"Cadastrar Serviço\" if request.method", "form,'title':title}) else: form = HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_donation(request,", "{'form': form,'title':title}) @login_required def edit_establishment(request, id): title = \"Editar Estabelecimento\" establishment = get_object_or_404(Establishment,", "def list_donation(request): donations = Donation.objects.all() return render(request, 'list_donation.html', {\"donations\": donations}) def list_voluntary(request): volunteers", "form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm(instance=establishment) return", "id): servico = HealthService.objects.get(id=id) if request.method == \"POST\": nome = request.POST['nome'] email =", "HealthServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_donation(request): title = \"Cadastrar Doação\"", "get_object_or_404(Establishment, id=id) establishment.delete() return render(request, 'delete.html') @login_required def delete_health_service(request, id): health_service = get_object_or_404(HealthService,", "return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form':", 
"Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html', {\"establishments\": establishments}) def list_health_service(request): health_services = HealthService.objects.all() return", "smtplib # @login_required # def formevent(request): # if request.method == 'POST': # divulgacao", "from django.shortcuts import get_object_or_404 from django.shortcuts import render from django.urls import reverse from", "@login_required def create_voluntary(request): title = \"Cadastrar Voluntário\" if request.method == \"POST\": form =", "# return render(request, 'index.html') def servicolist(request): estabelecimentos = Establishment.objects.all() saude = HealthService.objects.all() doacao", "list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request, id):", "== \"POST\": form = DonationForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html',", "title = \"Editar Doação\" donation = get_object_or_404(Donation, id=id) if request.method == \"POST\": form", "title = \"Cadastrar Voluntário\" if request.method == \"POST\": form = VoluntaryServiceForm(request.POST) if form.is_valid():", "return render(request, 'fale-conosco.html', {\"servico\": servico}) def perfil(request, id): servico = HealthService.objects.get(id=id) return render(request,", "divulgacao.horarioFim = request.POST['horarioFim'] # divulgacao.data = request.POST['data'] # divulgacao.user = request.user # divulgacao.save()", "form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm(instance=health_service) return", "Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) 
@login_required def create_establishment(request): title = \"Cadastrar Estabelecimento\" if", "= request.POST['cep'] # divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim =", "'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required", "HttpResponse from django.contrib.auth.decorators import login_required from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm ,", "latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request, 'mapa.html', {'latitude':latitude, 'longitude':", "title = \"Cadastrar Estabelecimento\" if request.method == \"POST\": form = EstablishmentForm(request.POST) if form.is_valid():", "'establishment.html', {'form': form,'title':title}) @login_required def create_donation(request): title = \"Cadastrar Doação\" if request.method ==", "longitude = 0 # if request.method == \"POST\": chave = \"<KEY>\" div =", "return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm() return render(request,", ".forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from .models import Establishment, HealthService,", "def edit_donation(request, id): title = \"Editar Doação\" donation = get_object_or_404(Donation, id=id) if request.method", "donations}) def list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def", "{'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def 
create_establishment(request): title = \"Cadastrar Estabelecimento\" if request.method == \"POST\": form", "divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim = request.POST['horarioFim'] # divulgacao.data", "servico = HealthService.objects.get(id=id) if request.method == \"POST\": nome = request.POST['nome'] email = request.POST['email']", "= DonationForm(request.POST, instance=donation) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title})", "return render(request , 'formevent.html') def mapa(request, id): latitude = 0 longitude = 0", "= VoluntaryServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) from .forms import CommunityActionForm, DonationForm, HealthServiceForm,", "= HealthService.objects.get(id=id) if request.method == \"POST\": nome = request.POST['nome'] email = request.POST['email'] texto", "Establishment, HealthService, Donation, VoluntaryService # from .models import Divulgacoes import requests import urllib,", "HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm() return render(request, 'establishment.html',", "'fale-conosco.html', {\"servico\": servico}) def perfil(request, id): servico = HealthService.objects.get(id=id) return render(request, 'perfil.html', {\"servico\":servico})", "mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return render(request, 'fale-conosco.html', {\"servico\": servico}) def perfil(request, id):", "id=id) if request.method == \"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save() return", "render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_voluntary(request): title = \"Cadastrar Voluntário\" if 
request.method", "== \"POST\": form = HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request,", "title = \"Cadastrar Doação\" if request.method == \"POST\": form = DonationForm(request.POST) if form.is_valid():", "# divulgacao.user = request.user # divulgacao.save() # return render(request , 'index.html') # return", "return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_donation(request): title = \"Cadastrar Doação\" if", "form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm(instance=establishment)", "= \"Editar Estabelecimento\" establishment = get_object_or_404(Establishment, id=id) if request.method == \"POST\": form =", "= request.POST['bairro'] # divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep = request.POST['cep'] # divulgacao.telefone =", "return render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form':", "form = HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form':", "mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return render(request,", "id): health_service = get_object_or_404(HealthService, id=id) health_service.delete() return render(request, 'delete.html') @login_required def delete_donation(request, id):", "render(request, 'delete.html') def list_establishment(request): establishments = Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html', {\"establishments\": 
establishments})", "id=id) delete_donation.delete() return render(request, 'delete.html') @login_required def delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService, id=id)", "render(request, 'list_health_service.html', {\"health_services\": health_services}) def list_donation(request): donations = Donation.objects.all() return render(request, 'list_donation.html', {\"donations\":", "form = HealthServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_donation(request): title =", "form,'title':title}) else: form = VoluntaryServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) from .forms import", "request.POST['endereco'] # #divulgacao.cep = request.POST['cep'] # divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio']", "# if request.method == 'POST': # divulgacao = Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento']", "HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_donation(request, id): title = \"Editar", "divulgacao.categoria = request.POST['categoria'] # divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco", "\"POST\": form = HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html',", "0 # if request.method == \"POST\": chave = \"<KEY>\" div = Establishment.objects.get(id=id) address", "{'form': form,'title':title}) @login_required def edit_voluntary(request, id): title = \"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService,", "request.method == \"POST\": chave = \"<KEY>\" div = Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r", "@login_required def 
edit_health_service(request, id): title = \"Editar Serviço de Saude\" health_service = get_object_or_404(HealthService,", "'establishment.html', {'form': form,'title':title}) @login_required def edit_health_service(request, id): title = \"Editar Serviço de Saude\"", "# divulgacao.categoria = request.POST['categoria'] # divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro'] #", "request.method == \"POST\": form = VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request,", "= request.POST['mensagem'] m = MIMEText(texto) m.set_charset('utf-8') m['Subject'] = email mail = smtplib.SMTP('smtp.gmail.com', 587)", "texto = request.POST['mensagem'] m = MIMEText(texto) m.set_charset('utf-8') m['Subject'] = email mail = smtplib.SMTP('smtp.gmail.com',", "'establishment.html', {'form': form,'title':title}) @login_required def create_voluntary(request): title = \"Cadastrar Voluntário\" if request.method ==", "form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm()", "render(request, 'establishment.html', {'form': form,'title':title}) from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm @login_required def", "HttpResponseRedirect from django.http import HttpResponse from django.contrib.auth.decorators import login_required from .forms import CommunityActionForm,", "print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request, id): servico = HealthService.objects.get(id=id) if", "establishments = Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html', {\"establishments\": establishments}) def list_health_service(request): health_services =", "{'form': form,'title':title}) else: form = VoluntaryServiceForm() 
return render(request, 'establishment.html', {'form': form,'title':title}) from .forms", "return render(request, 'establishment.html', {'form': form,'title':title}) from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm @login_required", "get_object_or_404(Donation, id=id) delete_donation.delete() return render(request, 'delete.html') @login_required def delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService,", "= Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200: dados", "EstablishmentForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_voluntary(request): title = \"Cadastrar Voluntário\"", "'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) from", "HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html',", "HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm() return render(request, 'establishment.html',", "title = \"Cadastrar Serviço\" if request.method == \"POST\": form = HealthServiceForm(request.POST) if form.is_valid():", "django.http import HttpResponse from django.contrib.auth.decorators import login_required from .forms import CommunityActionForm, DonationForm, HealthServiceForm,", "establishment = get_object_or_404(Establishment, id=id) establishment.delete() return render(request, 'delete.html') @login_required def delete_health_service(request, id): health_service", "from .models import Establishment, HealthService, 
Donation, VoluntaryService # from .models import Divulgacoes import", "{'form': form,'title':title}) else: form = HealthServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def", "volunteers}) def fale_conosco(request, id): servico = HealthService.objects.get(id=id) if request.method == \"POST\": nome =", "return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form':", "'establishment.html', {'form': form,'title':title}) from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm @login_required def create_health_service(request):", "return render(request, 'delete.html') @login_required def delete_health_service(request, id): health_service = get_object_or_404(HealthService, id=id) health_service.delete() return", "<filename>divulga/views.py from email.mime.text import MIMEText from django.shortcuts import get_object_or_404 from django.shortcuts import render", "# return render(request , 'formevent.html') def mapa(request, id): latitude = 0 longitude =", "form,'title':title}) @login_required def edit_establishment(request, id): title = \"Editar Estabelecimento\" establishment = get_object_or_404(Establishment, id=id)", "django.shortcuts import get_object_or_404 from django.shortcuts import render from django.urls import reverse from django.http", "request.user # divulgacao.save() # return render(request , 'index.html') # return render(request , 'formevent.html')", "VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else:", "= get_object_or_404(Establishment, id=id) if request.method == \"POST\": form = EstablishmentForm(request.POST, instance=product) if form.is_valid():", "form,'title':title}) else: form = 
EstablishmentForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_voluntary(request):", "MIMEText from django.shortcuts import get_object_or_404 from django.shortcuts import render from django.urls import reverse", "== \"POST\": form = VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html',", "# divulgacao.save() # return render(request , 'index.html') # return render(request , 'formevent.html') def", "if request.method == \"POST\": form = HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index'))", "{'form': form,'title':title}) @login_required def create_donation(request): title = \"Cadastrar Doação\" if request.method == \"POST\":", "return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm(instance=voluntary) return render(request,", ".models import Establishment, HealthService, Donation, VoluntaryService # from .models import Divulgacoes import requests", "email.mime.text import MIMEText from django.shortcuts import get_object_or_404 from django.shortcuts import render from django.urls", "mail.sendmail('<EMAIL>', email, m.as_string()) return render(request, 'fale-conosco.html', {\"servico\": servico}) def perfil(request, id): servico =", "get_object_or_404(VoluntaryService, id=id) if request.method == \"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save()", "def delete_donation(request, id): delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete() return render(request, 'delete.html') @login_required def", ", 'index.html') # return render(request , 'formevent.html') def mapa(request, id): latitude = 0", "def delete_establishment(request, id): establishment = 
get_object_or_404(Establishment, id=id) establishment.delete() return render(request, 'delete.html') @login_required def", "{'form': form,'title':title}) else: form = EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def", "= request.POST['horarioInicio'] # divulgacao.horarioFim = request.POST['horarioFim'] # divulgacao.data = request.POST['data'] # divulgacao.user =", "r.status_code == 200: dados = json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude)", "\"POST\": form = DonationForm(request.POST, instance=donation) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html',", "form = EstablishmentForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title})", "form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm(instance=health_service)", "{'form': form,'title':title}) else: form = EstablishmentForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def", "return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_establishment(request, id): title = \"Editar Estabelecimento\"", "else: form = DonationForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_establishment(request, id):", "form,'title':title}) else: form = DonationForm(instance=donation) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_voluntary(request,", "= dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] 
longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request, 'mapa.html', {'latitude':latitude, 'longitude': longitude})", "else: form = VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def delete_establishment(request, id):", "render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_donation(request): title = \"Cadastrar Doação\" if request.method", "{'form': form,'title':title}) @login_required def create_voluntary(request): title = \"Cadastrar Voluntário\" if request.method == \"POST\":", "= email mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string())", "HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else:", "'delete.html') @login_required def delete_health_service(request, id): health_service = get_object_or_404(HealthService, id=id) health_service.delete() return render(request, 'delete.html')", "\"Editar Serviço de Saude\" health_service = get_object_or_404(HealthService, id=id) if request.method == \"POST\": form", "Voluntário\" if request.method == \"POST\": form = VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\"))", "nome = request.POST['nome'] email = request.POST['email'] texto = request.POST['mensagem'] m = MIMEText(texto) m.set_charset('utf-8')", "request.POST['cep'] # divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim = request.POST['horarioFim']", "form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': 
form,'title':title}) else: form = EstablishmentForm() return", "django.http import HttpResponseRedirect from django.http import HttpResponse from django.contrib.auth.decorators import login_required from .forms", "dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request, 'mapa.html', {'latitude':latitude, 'longitude': longitude}) #", "= div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200: dados = json.loads(r.content) latitude", "HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm() return render(request, 'establishment.html',", "\"Editar Doação\" donation = get_object_or_404(Donation, id=id) if request.method == \"POST\": form = DonationForm(request.POST,", "render(request, 'delete.html') @login_required def delete_health_service(request, id): health_service = get_object_or_404(HealthService, id=id) health_service.delete() return render(request,", "edit_voluntary(request, id): title = \"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id) if request.method ==", "@login_required # def formevent(request): # if request.method == 'POST': # divulgacao = Establishment()", "request.POST['email'] texto = request.POST['mensagem'] m = MIMEText(texto) m.set_charset('utf-8') m['Subject'] = email mail =", "health_service = get_object_or_404(HealthService, id=id) if request.method == \"POST\": form = HealthServiceForm(request.POST, instance=health_service) if", "title = \"Editar Serviço de Saude\" health_service = get_object_or_404(HealthService, id=id) if request.method ==", "= HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return 
render(request, 'establishment.html', {'form': form,'title':title})", "id): title = \"Editar Doação\" donation = get_object_or_404(Donation, id=id) if request.method == \"POST\":", "'delete.html') @login_required def delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request, 'delete.html')", "\"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id) if request.method == \"POST\": form = VoluntaryServiceForm(request.POST,", "request.POST['data'] # divulgacao.user = request.user # divulgacao.save() # return render(request , 'index.html') #", "form = VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form':", "== \"POST\": chave = \"<KEY>\" div = Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r =", "else: form = EstablishmentForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_voluntary(request): title", "if request.method == \"POST\": chave = \"<KEY>\" div = Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade", "divulgacao.data = request.POST['data'] # divulgacao.user = request.user # divulgacao.save() # return render(request ,", "create_health_service(request): title = \"Cadastrar Serviço\" if request.method == \"POST\": form = HealthServiceForm(request.POST) if", "= Donation.objects.all() return render(request, 'list_donation.html', {\"donations\": donations}) def list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers)", "def create_health_service(request): title = \"Cadastrar Serviço\" if request.method == \"POST\": form = HealthServiceForm(request.POST)", "Doação\" if request.method == \"POST\": form = DonationForm(request.POST) if form.is_valid(): form.save() return 
HttpResponseRedirect(reverse(\"index\"))", "= EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title})", "== \"POST\": form = DonationForm(request.POST, instance=donation) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request,", "= Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria'] # divulgacao.cidade =", "HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm() return render(request, 'establishment.html',", "delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request, 'delete.html') def list_establishment(request): establishments = Establishment.objects.all()", "delete_establishment(request, id): establishment = get_object_or_404(Establishment, id=id) establishment.delete() return render(request, 'delete.html') @login_required def delete_health_service(request,", "== \"POST\": form = HealthServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html',", "form = VoluntaryServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) from .forms import CommunityActionForm, DonationForm,", "print(establishments) return render(request, 'list_establishment.html', {\"establishments\": establishments}) def list_health_service(request): health_services = HealthService.objects.all() return render(request,", "get_object_or_404(HealthService, id=id) health_service.delete() return render(request, 'delete.html') @login_required def delete_donation(request, id): delete_donation = get_object_or_404(Donation,", "HealthServiceForm, EstablishmentForm @login_required def 
create_health_service(request): title = \"Cadastrar Serviço\" if request.method == \"POST\":", "'establishment.html', {'form': form,'title':title}) @login_required def delete_establishment(request, id): establishment = get_object_or_404(Establishment, id=id) establishment.delete() return", "= get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request, 'delete.html') def list_establishment(request): establishments = Establishment.objects.all() print(establishments)", "from django.http import HttpResponse from django.contrib.auth.decorators import login_required from .forms import CommunityActionForm, DonationForm,", "form = DonationForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title})", "create_voluntary(request): title = \"Cadastrar Voluntário\" if request.method == \"POST\": form = VoluntaryServiceForm(request.POST) if", "email = request.POST['email'] texto = request.POST['mensagem'] m = MIMEText(texto) m.set_charset('utf-8') m['Subject'] = email", "= VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else:", "= Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def create_establishment(request): title = \"Cadastrar Estabelecimento\"", "id=id) health_service.delete() return render(request, 'delete.html') @login_required def delete_donation(request, id): delete_donation = get_object_or_404(Donation, id=id)", "divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep", "m = MIMEText(texto) m.set_charset('utf-8') m['Subject'] = email mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() 
mail.starttls()", "import login_required from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from .models", "= EstablishmentForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else:", "Divulgacoes import requests import urllib, json import smtplib # @login_required # def formevent(request):", "{'form': form,'title':title}) else: form = DonationForm(instance=donation) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def", "else: form = EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_health_service(request, id):", "delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request, 'delete.html') def list_establishment(request): establishments", "= EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_health_service(request, id): title =", "Donation.objects.all() return render(request, 'list_donation.html', {\"donations\": donations}) def list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers) return", "de Saude\" health_service = get_object_or_404(HealthService, id=id) if request.method == \"POST\": form = HealthServiceForm(request.POST,", "form,'title':title}) else: form = DonationForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_establishment(request,", "form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm()", "render(request , 'index.html') # return render(request , 
'formevent.html') def mapa(request, id): latitude =", "form = VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def delete_establishment(request, id): establishment", "VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request, id): servico = HealthService.objects.get(id=id)", "'POST': # divulgacao = Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria']", "= HealthService.objects.all() doacao = Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def create_establishment(request): title", "id=id) if request.method == \"POST\": form = DonationForm(request.POST, instance=donation) if form.is_valid(): form.save() return", "def create_voluntary(request): title = \"Cadastrar Voluntário\" if request.method == \"POST\": form = VoluntaryServiceForm(request.POST)", "'establishment.html', {'form': form,'title':title}) @login_required def edit_voluntary(request, id): title = \"Editar Voluntário\" voluntary =", "reverse from django.http import HttpResponseRedirect from django.http import HttpResponse from django.contrib.auth.decorators import login_required", "MIMEText(texto) m.set_charset('utf-8') m['Subject'] = email mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb')", "get_object_or_404(HealthService, id=id) if request.method == \"POST\": form = HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save()", "= VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def delete_establishment(request, id): establishment =", "create_establishment(request): title = \"Cadastrar 
Estabelecimento\" if request.method == \"POST\": form = EstablishmentForm(request.POST) if", "'establishment.html', {'form': form,'title':title}) @login_required def edit_donation(request, id): title = \"Editar Doação\" donation =", "divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria'] # divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro", "{\"health_services\": health_services}) def list_donation(request): donations = Donation.objects.all() return render(request, 'list_donation.html', {\"donations\": donations}) def", "divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim = request.POST['horarioFim'] # divulgacao.data = request.POST['data'] # divulgacao.user", "return render(request, 'index.html') def servicolist(request): estabelecimentos = Establishment.objects.all() saude = HealthService.objects.all() doacao =", "request.method == \"POST\": nome = request.POST['nome'] email = request.POST['email'] texto = request.POST['mensagem'] m", "import urllib, json import smtplib # @login_required # def formevent(request): # if request.method", "'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return render(request, 'fale-conosco.html', {\"servico\": servico}) def perfil(request, id): servico", "return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_donation(request, id): title = \"Editar Doação\"", "render(request, 'list_establishment.html', {\"establishments\": establishments}) def list_health_service(request): health_services = HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\":", "# from .models import Divulgacoes import requests import urllib, json import smtplib #", "'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def create_establishment(request): title = \"Cadastrar Estabelecimento\" if request.method == \"POST\":", 
"mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return render(request, 'fale-conosco.html', {\"servico\": servico}) def", "fale_conosco(request, id): servico = HealthService.objects.get(id=id) if request.method == \"POST\": nome = request.POST['nome'] email", "= request.POST['horarioFim'] # divulgacao.data = request.POST['data'] # divulgacao.user = request.user # divulgacao.save() #", "render(request , 'formevent.html') def mapa(request, id): latitude = 0 longitude = 0 #", "return render(request, 'delete.html') @login_required def delete_donation(request, id): delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete() return", "\"POST\": chave = \"<KEY>\" div = Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave)", "\"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html',", "= VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title})", "# return render(request , 'index.html') # return render(request , 'formevent.html') def mapa(request, id):", "get_object_or_404(Establishment, id=id) if request.method == \"POST\": form = EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save()", "from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from .models import Establishment,", "return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm() return render(request, 'establishment.html', {'form':", "#print(latitude) #print(longitude) return render(request, 'mapa.html', 
{'latitude':latitude, 'longitude': longitude}) # return render(request, 'index.html') def", "establishment.delete() return render(request, 'delete.html') @login_required def delete_health_service(request, id): health_service = get_object_or_404(HealthService, id=id) health_service.delete()", "'list_health_service.html', {\"health_services\": health_services}) def list_donation(request): donations = Donation.objects.all() return render(request, 'list_donation.html', {\"donations\": donations})", "form = EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form':", "estabelecimentos = Establishment.objects.all() saude = HealthService.objects.all() doacao = Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao})", "render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_health_service(request, id): title = \"Editar Serviço de", "form,'title':title}) @login_required def edit_health_service(request, id): title = \"Editar Serviço de Saude\" health_service =", "HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\": health_services}) def list_donation(request): donations = Donation.objects.all() return render(request,", "if request.method == \"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index'))", "EstablishmentForm , VoluntaryServiceForm from .models import Establishment, HealthService, Donation, VoluntaryService # from .models", "requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200: dados = json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude =", "'list_establishment.html', 
{\"establishments\": establishments}) def list_health_service(request): health_services = HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\": health_services})", "= \"Editar Serviço de Saude\" health_service = get_object_or_404(HealthService, id=id) if request.method == \"POST\":", "edit_donation(request, id): title = \"Editar Doação\" donation = get_object_or_404(Donation, id=id) if request.method ==", "else: form = HealthServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_donation(request): title", "donation = get_object_or_404(Donation, id=id) if request.method == \"POST\": form = DonationForm(request.POST, instance=donation) if", "== \"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request,", "HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm(instance=establishment) return render(request, 'establishment.html',", "def edit_establishment(request, id): title = \"Editar Estabelecimento\" establishment = get_object_or_404(Establishment, id=id) if request.method", "m.as_string()) return render(request, 'fale-conosco.html', {\"servico\": servico}) def perfil(request, id): servico = HealthService.objects.get(id=id) return", "import render from django.urls import reverse from django.http import HttpResponseRedirect from django.http import", "if request.method == \"POST\": form = HealthServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return", "@login_required def create_donation(request): title = \"Cadastrar Doação\" if request.method == \"POST\": form =", "form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form 
= VoluntaryServiceForm(instance=voluntary) return", "EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_health_service(request, id): title = \"Editar", "m['Subject'] = email mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email,", "doacao = Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def create_establishment(request): title = \"Cadastrar", "email mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return", "django.urls import reverse from django.http import HttpResponseRedirect from django.http import HttpResponse from django.contrib.auth.decorators", "return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_voluntary(request): title = \"Cadastrar Voluntário\" if", "DonationForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form", "if request.method == \"POST\": nome = request.POST['nome'] email = request.POST['email'] texto = request.POST['mensagem']", "= HealthServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else:", "instance=voluntary) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form", "request.POST['horarioFim'] # divulgacao.data = request.POST['data'] # divulgacao.user = request.user # divulgacao.save() # return", "return HttpResponseRedirect(reverse('index')) return 
render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm(instance=establishment) return render(request,", "return render(request, 'delete.html') @login_required def delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return", "DonationForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_establishment(request, id): title = \"Editar", "def delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request, 'delete.html') def list_establishment(request):", "form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm(instance=donation) return", "form,'title':title}) @login_required def edit_donation(request, id): title = \"Editar Doação\" donation = get_object_or_404(Donation, id=id)", "return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_voluntary(request, id): title = \"Editar Voluntário\"", "{\"volunteers\": volunteers}) def fale_conosco(request, id): servico = HealthService.objects.get(id=id) if request.method == \"POST\": nome", "login_required from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from .models import", "\"Cadastrar Serviço\" if request.method == \"POST\": form = HealthServiceForm(request.POST) if form.is_valid(): form.save() return", "= request.user # divulgacao.save() # return render(request , 'index.html') # return render(request ,", "= \"Cadastrar Voluntário\" if request.method == \"POST\": form = VoluntaryServiceForm(request.POST) if form.is_valid(): form.save()", "@login_required def create_health_service(request): title = \"Cadastrar Serviço\" if request.method == \"POST\": form =", ", VoluntaryServiceForm 
from .models import Establishment, HealthService, Donation, VoluntaryService # from .models import", "delete_donation(request, id): delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete() return render(request, 'delete.html') @login_required def delete_voluntary(request,", "import Establishment, HealthService, Donation, VoluntaryService # from .models import Divulgacoes import requests import", "saude = HealthService.objects.all() doacao = Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def create_establishment(request):", "else: form = VoluntaryServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) from .forms import CommunityActionForm,", "import Divulgacoes import requests import urllib, json import smtplib # @login_required # def", "title = \"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id) if request.method == \"POST\": form", ".models import Divulgacoes import requests import urllib, json import smtplib # @login_required #", "'establishment.html', {'form': form,'title':title}) else: form = DonationForm(instance=donation) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required", "longitude}) # return render(request, 'index.html') def servicolist(request): estabelecimentos = Establishment.objects.all() saude = HealthService.objects.all()", "json import smtplib # @login_required # def formevent(request): # if request.method == 'POST':", "list_establishment(request): establishments = Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html', {\"establishments\": establishments}) def list_health_service(request): health_services", "return render(request , 'index.html') # return render(request , 'formevent.html') def mapa(request, id): latitude", "@login_required def edit_donation(request, id): title = 
\"Editar Doação\" donation = get_object_or_404(Donation, id=id) if", "#print(longitude) return render(request, 'mapa.html', {'latitude':latitude, 'longitude': longitude}) # return render(request, 'index.html') def servicolist(request):", "HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from .models import Establishment, HealthService, Donation, VoluntaryService # from", "mapa(request, id): latitude = 0 longitude = 0 # if request.method == \"POST\":", "# divulgacao.horarioFim = request.POST['horarioFim'] # divulgacao.data = request.POST['data'] # divulgacao.user = request.user #", "json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request, 'mapa.html', {'latitude':latitude,", "email, m.as_string()) return render(request, 'fale-conosco.html', {\"servico\": servico}) def perfil(request, id): servico = HealthService.objects.get(id=id)", "if request.method == \"POST\": form = DonationForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return", "= HealthServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_donation(request): title = \"Cadastrar", "health_services}) def list_donation(request): donations = Donation.objects.all() return render(request, 'list_donation.html', {\"donations\": donations}) def list_voluntary(request):", "DonationForm(request.POST, instance=donation) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else:", "from email.mime.text import MIMEText from django.shortcuts import get_object_or_404 from django.shortcuts import render from", "{'form': form,'title':title}) from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm @login_required 
def create_health_service(request): title", "{'form': form,'title':title}) else: form = DonationForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def", "form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm(instance=voluntary)", "form = EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_health_service(request, id): title", "div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200: dados = json.loads(r.content) latitude =", "# #divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep = request.POST['cep'] #", "render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request, id): servico = HealthService.objects.get(id=id) if request.method ==", "'list_donation.html', {\"donations\": donations}) def list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\":", "@login_required def delete_donation(request, id): delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete() return render(request, 'delete.html') @login_required", "= \"Editar Doação\" donation = get_object_or_404(Donation, id=id) if request.method == \"POST\": form =", "list_health_service(request): health_services = HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\": health_services}) def list_donation(request): donations =", "{'form': form,'title':title}) @login_required def edit_donation(request, id): title = \"Editar Doação\" donation = get_object_or_404(Donation,", "form.is_valid(): form.save() return 
HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm(instance=donation)", "# if request.method == \"POST\": chave = \"<KEY>\" div = Establishment.objects.get(id=id) address =", "form,'title':title}) else: form = EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_health_service(request,", "from django.http import HttpResponseRedirect from django.http import HttpResponse from django.contrib.auth.decorators import login_required from", "{'form': form,'title':title}) @login_required def delete_establishment(request, id): establishment = get_object_or_404(Establishment, id=id) establishment.delete() return render(request,", "render(request, 'establishment.html', {'form': form,'title':title}) @login_required def delete_establishment(request, id): establishment = get_object_or_404(Establishment, id=id) establishment.delete()", "def list_establishment(request): establishments = Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html', {\"establishments\": establishments}) def list_health_service(request):", "from .models import Divulgacoes import requests import urllib, json import smtplib # @login_required", "form = HealthServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title})", "request.POST['mensagem'] m = MIMEText(texto) m.set_charset('utf-8') m['Subject'] = email mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo()", "form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm() return", "return render(request, 'list_health_service.html', {\"health_services\": health_services}) def list_donation(request): donations = 
Donation.objects.all() return render(request, 'list_donation.html',", "DonationForm, HealthServiceForm, EstablishmentForm @login_required def create_health_service(request): title = \"Cadastrar Serviço\" if request.method ==", "def list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request,", "Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200: dados =", "import requests import urllib, json import smtplib # @login_required # def formevent(request): #", "EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else:", "health_service.delete() return render(request, 'delete.html') @login_required def delete_donation(request, id): delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete()", "= DonationForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else:", "request.POST['bairro'] # divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep = request.POST['cep'] # divulgacao.telefone = request.POST['telefone']", "@login_required def delete_establishment(request, id): establishment = get_object_or_404(Establishment, id=id) establishment.delete() return render(request, 'delete.html') @login_required", "from django.contrib.auth.decorators import login_required from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm", "divulgacao = Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria'] 
# divulgacao.cidade", "formevent(request): # if request.method == 'POST': # divulgacao = Establishment() # divulgacao.nomeEvento =", "return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm() return render(request, 'establishment.html', {'form':", "= DonationForm(instance=donation) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_voluntary(request, id): title =", "request.method == \"POST\": form = DonationForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request,", "587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return render(request, 'fale-conosco.html', {\"servico\": servico})", "request.POST['nome'] email = request.POST['email'] texto = request.POST['mensagem'] m = MIMEText(texto) m.set_charset('utf-8') m['Subject'] =", "Estabelecimento\" if request.method == \"POST\": form = EstablishmentForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\"))", "render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm() return render(request, 'establishment.html', {'form': form,'title':title})", "return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm() return render(request,", "HealthService.objects.get(id=id) if request.method == \"POST\": nome = request.POST['nome'] email = request.POST['email'] texto =", "{\"establishments\": establishments}) def list_health_service(request): health_services = HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\": health_services}) def", "import smtplib # @login_required # def formevent(request): # if request.method == 'POST': #", "edit_health_service(request, id): title = \"Editar 
Serviço de Saude\" health_service = get_object_or_404(HealthService, id=id) if", "form = HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_donation(request, id): title", "def edit_health_service(request, id): title = \"Editar Serviço de Saude\" health_service = get_object_or_404(HealthService, id=id)", "health_service = get_object_or_404(HealthService, id=id) health_service.delete() return render(request, 'delete.html') @login_required def delete_donation(request, id): delete_donation", "id): title = \"Editar Serviço de Saude\" health_service = get_object_or_404(HealthService, id=id) if request.method", "= 0 # if request.method == \"POST\": chave = \"<KEY>\" div = Establishment.objects.get(id=id)", "= HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_donation(request, id): title =", "form = DonationForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_establishment(request, id): title", "delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete() return render(request, 'delete.html') @login_required def delete_voluntary(request, id): delete_voluntary", "render(request, 'mapa.html', {'latitude':latitude, 'longitude': longitude}) # return render(request, 'index.html') def servicolist(request): estabelecimentos =", "urllib, json import smtplib # @login_required # def formevent(request): # if request.method ==", "render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_voluntary(request, id): title = \"Editar Voluntário\" voluntary", "@login_required def edit_voluntary(request, id): title = \"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id) if", "instance=health_service) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return 
render(request, 'establishment.html', {'form': form,'title':title}) else: form", "request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep = request.POST['cep']", "'longitude': longitude}) # return render(request, 'index.html') def servicolist(request): estabelecimentos = Establishment.objects.all() saude =", "request.method == \"POST\": form = DonationForm(request.POST, instance=donation) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return", "# divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco = request.POST['endereco'] #", "#divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep = request.POST['cep'] # divulgacao.telefone", "div = Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200:", "if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form =", "requests import urllib, json import smtplib # @login_required # def formevent(request): # if", "voluntary = get_object_or_404(VoluntaryService, id=id) if request.method == \"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary) if", "render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm(instance=donation) return render(request, 'establishment.html', {'form': form,'title':title})", "get_object_or_404 from django.shortcuts import render from django.urls import reverse from django.http import HttpResponseRedirect", "request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria'] # divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro']", "id=id) 
if request.method == \"POST\": form = HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save() return", "from django.urls import reverse from django.http import HttpResponseRedirect from django.http import HttpResponse from", "VoluntaryService # from .models import Divulgacoes import requests import urllib, json import smtplib", "render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def create_establishment(request): title = \"Cadastrar Estabelecimento\" if request.method ==", "return render(request, 'list_establishment.html', {\"establishments\": establishments}) def list_health_service(request): health_services = HealthService.objects.all() return render(request, 'list_health_service.html',", "= request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria'] # divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro =", "list_donation(request): donations = Donation.objects.all() return render(request, 'list_donation.html', {\"donations\": donations}) def list_voluntary(request): volunteers =", "Estabelecimento\" establishment = get_object_or_404(Establishment, id=id) if request.method == \"POST\": form = EstablishmentForm(request.POST, instance=product)", "0 longitude = 0 # if request.method == \"POST\": chave = \"<KEY>\" div", "# divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep = request.POST['cep'] # divulgacao.telefone = request.POST['telefone'] #", "import HttpResponseRedirect from django.http import HttpResponse from django.contrib.auth.decorators import login_required from .forms import", "import reverse from django.http import HttpResponseRedirect from django.http import HttpResponse from django.contrib.auth.decorators import", "'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form': form,'title':title}) 
@login_required", "form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm()", "django.shortcuts import render from django.urls import reverse from django.http import HttpResponseRedirect from django.http", "dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request, 'mapa.html', {'latitude':latitude, 'longitude': longitude}) # return render(request, 'index.html')", "HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm(instance=donation) return render(request, 'establishment.html',", "== 200: dados = json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude)", "200: dados = json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return", "= \"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id) if request.method == \"POST\": form =", "def fale_conosco(request, id): servico = HealthService.objects.get(id=id) if request.method == \"POST\": nome = request.POST['nome']", "id): latitude = 0 longitude = 0 # if request.method == \"POST\": chave", "#divulgacao.cep = request.POST['cep'] # divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim", "get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request, 'delete.html') def list_establishment(request): establishments = Establishment.objects.all() print(establishments) return", "= request.POST['email'] texto = request.POST['mensagem'] m = MIMEText(texto) 
m.set_charset('utf-8') m['Subject'] = email mail", "render from django.urls import reverse from django.http import HttpResponseRedirect from django.http import HttpResponse", "= MIMEText(texto) m.set_charset('utf-8') m['Subject'] = email mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>',", "= \"<KEY>\" div = Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code", "form,'title':title}) @login_required def create_voluntary(request): title = \"Cadastrar Voluntário\" if request.method == \"POST\": form", "= get_object_or_404(HealthService, id=id) health_service.delete() return render(request, 'delete.html') @login_required def delete_donation(request, id): delete_donation =", "\"Cadastrar Voluntário\" if request.method == \"POST\": form = VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return", "= get_object_or_404(HealthService, id=id) if request.method == \"POST\": form = HealthServiceForm(request.POST, instance=health_service) if form.is_valid():", "HealthService.objects.all() doacao = Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def create_establishment(request): title =", "render(request, 'delete.html') @login_required def delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request,", "# divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria'] # divulgacao.cidade = request.POST['cidade'] #", "def mapa(request, id): latitude = 0 longitude = 0 # if request.method ==", "= request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim = request.POST['horarioFim'] # divulgacao.data =", "return 
HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm(instance=donation) return render(request,", ", 'formevent.html') def mapa(request, id): latitude = 0 longitude = 0 # if", "form,'title':title}) @login_required def edit_voluntary(request, id): title = \"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id)", "{\"donations\": donations}) def list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\": volunteers})", "\"Editar Estabelecimento\" establishment = get_object_or_404(Establishment, id=id) if request.method == \"POST\": form = EstablishmentForm(request.POST,", "import get_object_or_404 from django.shortcuts import render from django.urls import reverse from django.http import", "def list_health_service(request): health_services = HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\": health_services}) def list_donation(request): donations", "smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return render(request, 'fale-conosco.html', {\"servico\":", "def edit_voluntary(request, id): title = \"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id) if request.method", "{'latitude':latitude, 'longitude': longitude}) # return render(request, 'index.html') def servicolist(request): estabelecimentos = Establishment.objects.all() saude", "Serviço de Saude\" health_service = get_object_or_404(HealthService, id=id) if request.method == \"POST\": form =", "request.method == \"POST\": form = EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return", "= Establishment.objects.all() saude = HealthService.objects.all() doacao = 
Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required", "if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form =", "edit_establishment(request, id): title = \"Editar Estabelecimento\" establishment = get_object_or_404(Establishment, id=id) if request.method ==", "= DonationForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_establishment(request, id): title =", "return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm(instance=donation) return render(request, 'establishment.html', {'form':", "id): delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete() return render(request, 'delete.html') @login_required def delete_voluntary(request, id):", "'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required", "= dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request, 'mapa.html', {'latitude':latitude, 'longitude': longitude}) # return render(request,", "= EstablishmentForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_voluntary(request): title = \"Cadastrar", "'establishment.html', {'form': form,'title':title}) else: form = DonationForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required", "@login_required def create_establishment(request): title = \"Cadastrar Estabelecimento\" if request.method == \"POST\": form =", "== \"POST\": form = EstablishmentForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 
'establishment.html',", "\"POST\": form = VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form':", "divulgacao.user = request.user # divulgacao.save() # return render(request , 'index.html') # return render(request", "Establishment.objects.all() saude = HealthService.objects.all() doacao = Donation.objects.all() return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def", "@login_required def delete_health_service(request, id): health_service = get_object_or_404(HealthService, id=id) health_service.delete() return render(request, 'delete.html') @login_required", "id=id) establishment.delete() return render(request, 'delete.html') @login_required def delete_health_service(request, id): health_service = get_object_or_404(HealthService, id=id)", "chave = \"<KEY>\" div = Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if", "render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_donation(request, id): title = \"Editar Doação\" donation", "= get_object_or_404(Donation, id=id) if request.method == \"POST\": form = DonationForm(request.POST, instance=donation) if form.is_valid():", "render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form': form,'title':title})", "m.set_charset('utf-8') m['Subject'] = email mail = smtplib.SMTP('smtp.gmail.com', 587) mail.ehlo() mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>',", "HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm(instance=health_service) 
return render(request, 'establishment.html',", "{'form': form,'title':title}) else: form = HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def", "instance=product) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form", "delete_health_service(request, id): health_service = get_object_or_404(HealthService, id=id) health_service.delete() return render(request, 'delete.html') @login_required def delete_donation(request,", "if r.status_code == 200: dados = json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"]", "if request.method == \"POST\": form = DonationForm(request.POST, instance=donation) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index'))", "= request.POST['categoria'] # divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco =", "return render(request, 'delete.html') def list_establishment(request): establishments = Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html', {\"establishments\":", "id=id) delete_voluntary.delete() return render(request, 'delete.html') def list_establishment(request): establishments = Establishment.objects.all() print(establishments) return render(request,", "== \"POST\": nome = request.POST['nome'] email = request.POST['email'] texto = request.POST['mensagem'] m =", "form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm()", "VoluntaryServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) from .forms import CommunityActionForm, DonationForm, 
HealthServiceForm, EstablishmentForm", "request.method == \"POST\": form = HealthServiceForm(request.POST, instance=health_service) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return", "render(request, 'delete.html') @login_required def delete_donation(request, id): delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete() return render(request,", "= request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep =", ".forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm @login_required def create_health_service(request): title = \"Cadastrar Serviço\"", "# divulgacao = Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria'] #", "longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request, 'mapa.html', {'latitude':latitude, 'longitude': longitude}) # return", "# #divulgacao.cep = request.POST['cep'] # divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio'] #", "render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_establishment(request, id): title = \"Editar Estabelecimento\" establishment", "return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm() return render(request,", "request.method == \"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return", "address = div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200: dados = json.loads(r.content)", "Saude\" health_service = 
get_object_or_404(HealthService, id=id) if request.method == \"POST\": form = HealthServiceForm(request.POST, instance=health_service)", "dados = json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request,", "id=id) if request.method == \"POST\": form = EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save() return", "VoluntaryServiceForm from .models import Establishment, HealthService, Donation, VoluntaryService # from .models import Divulgacoes", "latitude = 0 longitude = 0 # if request.method == \"POST\": chave =", "servicolist(request): estabelecimentos = Establishment.objects.all() saude = HealthService.objects.all() doacao = Donation.objects.all() return render(request, 'servicolist.html',", "\"POST\": form = HealthServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form':", "else: form = DonationForm(instance=donation) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_voluntary(request, id):", "title = \"Editar Estabelecimento\" establishment = get_object_or_404(Establishment, id=id) if request.method == \"POST\": form", "== \"POST\": form = EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request,", "else: form = HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_donation(request, id):", "\"Cadastrar Doação\" if request.method == \"POST\": form = DonationForm(request.POST) if form.is_valid(): form.save() return", "render(request, 'index.html') def servicolist(request): estabelecimentos = Establishment.objects.all() saude = 
HealthService.objects.all() doacao = Donation.objects.all()", "VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form", "def servicolist(request): estabelecimentos = Establishment.objects.all() saude = HealthService.objects.all() doacao = Donation.objects.all() return render(request,", "return render(request, 'list_donation.html', {\"donations\": donations}) def list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers) return render(request,", "\"POST\": form = EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html',", "return render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request, id): servico = HealthService.objects.get(id=id) if request.method", "Doação\" donation = get_object_or_404(Donation, id=id) if request.method == \"POST\": form = DonationForm(request.POST, instance=donation)", "VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def delete_establishment(request, id): establishment = get_object_or_404(Establishment,", "form,'title':title}) else: form = VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def delete_establishment(request,", "= HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\": health_services}) def list_donation(request): donations = Donation.objects.all() return", "= get_object_or_404(Establishment, id=id) establishment.delete() return render(request, 'delete.html') @login_required def delete_health_service(request, id): health_service =", "= \"Cadastrar Estabelecimento\" if request.method == \"POST\": form = 
EstablishmentForm(request.POST) if form.is_valid(): form.save()", "form = DonationForm(request.POST, instance=donation) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form':", "= get_object_or_404(VoluntaryService, id=id) if request.method == \"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary) if form.is_valid():", "get_object_or_404(Donation, id=id) if request.method == \"POST\": form = DonationForm(request.POST, instance=donation) if form.is_valid(): form.save()", "if request.method == \"POST\": form = EstablishmentForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return", "= \"Cadastrar Serviço\" if request.method == \"POST\": form = HealthServiceForm(request.POST) if form.is_valid(): form.save()", "= 0 longitude = 0 # if request.method == \"POST\": chave = \"<KEY>\"", "'mapa.html', {'latitude':latitude, 'longitude': longitude}) # return render(request, 'index.html') def servicolist(request): estabelecimentos = Establishment.objects.all()", "Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria = request.POST['categoria'] # divulgacao.cidade = request.POST['cidade']", "'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm(instance=health_service) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required", "= request.POST['endereco'] # #divulgacao.cep = request.POST['cep'] # divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio =", "'delete.html') @login_required def delete_donation(request, id): delete_donation = get_object_or_404(Donation, id=id) delete_donation.delete() return render(request, 'delete.html')", "def create_establishment(request): title = \"Cadastrar Estabelecimento\" if request.method == \"POST\": form = EstablishmentForm(request.POST)", "\"<KEY>\" div = 
Establishment.objects.get(id=id) address = div.endereco+\"+\"+div.cidade r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code ==", "'formevent.html') def mapa(request, id): latitude = 0 longitude = 0 # if request.method", "if request.method == \"POST\": form = VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return", "return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_health_service(request, id): title = \"Editar Serviço", "form,'title':title}) else: form = HealthServiceForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_donation(request):", "import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm @login_required def create_health_service(request): title = \"Cadastrar Serviço\" if", "if request.method == \"POST\": form = EstablishmentForm(request.POST, instance=product) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('index'))", "form,'title':title}) @login_required def create_donation(request): title = \"Cadastrar Doação\" if request.method == \"POST\": form", "render(request, 'fale-conosco.html', {\"servico\": servico}) def perfil(request, id): servico = HealthService.objects.get(id=id) return render(request, 'perfil.html',", "import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from .models import Establishment, HealthService, Donation,", "delete_donation.delete() return render(request, 'delete.html') @login_required def delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete()", "# divulgacao.data = request.POST['data'] # divulgacao.user = request.user # divulgacao.save() # return render(request", "instance=donation) if form.is_valid(): form.save() return 
HttpResponseRedirect(reverse('index')) return render(request, 'establishment.html', {'form': form,'title':title}) else: form", "return render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm() return render(request, 'establishment.html', {'form':", "request.method == 'POST': # divulgacao = Establishment() # divulgacao.nomeEvento = request.POST['nomeEvento'] # divulgacao.categoria", "HealthServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form", "render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm(instance=establishment) return render(request, 'establishment.html', {'form': form,'title':title})", "volunteers = VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request, id): servico", "form = EstablishmentForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def create_voluntary(request): title =", "establishments}) def list_health_service(request): health_services = HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\": health_services}) def list_donation(request):", "= requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200: dados = json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude", "'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request, id): servico = HealthService.objects.get(id=id) if request.method == \"POST\":", "{'form': form,'title':title}) else: form = VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def", "id): title = \"Editar 
Estabelecimento\" establishment = get_object_or_404(Establishment, id=id) if request.method == \"POST\":", "request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim = request.POST['horarioFim'] # divulgacao.data = request.POST['data']", "return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm() return render(request, 'establishment.html', {'form':", "= Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html', {\"establishments\": establishments}) def list_health_service(request): health_services = HealthService.objects.all()", "r = requests.get(\"https://maps.googleapis.com/maps/api/geocode/json?address=\"+address+\"&key=\"+chave) if r.status_code == 200: dados = json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"]", "Donation, VoluntaryService # from .models import Divulgacoes import requests import urllib, json import", "from django.shortcuts import render from django.urls import reverse from django.http import HttpResponseRedirect from", "donations = Donation.objects.all() return render(request, 'list_donation.html', {\"donations\": donations}) def list_voluntary(request): volunteers = VoluntaryService.objects.all()", "form = DonationForm(instance=donation) return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def edit_voluntary(request, id): title", "'delete.html') def list_establishment(request): establishments = Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html', {\"establishments\": establishments}) def", "return render(request, 'establishment.html', {'form': form,'title':title}) @login_required def delete_establishment(request, id): establishment = get_object_or_404(Establishment, id=id)", "divulgacao.save() # return render(request , 'index.html') # return render(request , 'formevent.html') def 
mapa(request,", "mail.starttls() mail.login('<EMAIL>', 'fiscaeunb') mail.sendmail('<EMAIL>', email, m.as_string()) return render(request, 'fale-conosco.html', {\"servico\": servico}) def perfil(request,", "= VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html', {\"volunteers\": volunteers}) def fale_conosco(request, id): servico =", "= request.POST['data'] # divulgacao.user = request.user # divulgacao.save() # return render(request , 'index.html')", "Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id) if request.method == \"POST\": form = VoluntaryServiceForm(request.POST, instance=voluntary)", "delete_voluntary.delete() return render(request, 'delete.html') def list_establishment(request): establishments = Establishment.objects.all() print(establishments) return render(request, 'list_establishment.html',", "import HttpResponse from django.contrib.auth.decorators import login_required from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm", "= request.POST['nome'] email = request.POST['email'] texto = request.POST['mensagem'] m = MIMEText(texto) m.set_charset('utf-8') m['Subject']", "# divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio = request.POST['horarioInicio'] # divulgacao.horarioFim = request.POST['horarioFim'] #", "'index.html') # return render(request , 'formevent.html') def mapa(request, id): latitude = 0 longitude", "id): title = \"Editar Voluntário\" voluntary = get_object_or_404(VoluntaryService, id=id) if request.method == \"POST\":", "@login_required def delete_voluntary(request, id): delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request, 'delete.html') def", "django.contrib.auth.decorators import login_required from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from", "\"POST\": nome = request.POST['nome'] email = 
request.POST['email'] texto = request.POST['mensagem'] m = MIMEText(texto)", "render(request, 'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm() return render(request, 'establishment.html', {'form': form,'title':title})", "request.method == \"POST\": form = HealthServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request,", "\"POST\": form = EstablishmentForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form':", "\"Cadastrar Estabelecimento\" if request.method == \"POST\": form = EstablishmentForm(request.POST) if form.is_valid(): form.save() return", "request.method == \"POST\": form = EstablishmentForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request,", "@login_required def edit_establishment(request, id): title = \"Editar Estabelecimento\" establishment = get_object_or_404(Establishment, id=id) if", "EstablishmentForm @login_required def create_health_service(request): title = \"Cadastrar Serviço\" if request.method == \"POST\": form", "id): establishment = get_object_or_404(Establishment, id=id) establishment.delete() return render(request, 'delete.html') @login_required def delete_health_service(request, id):", "'establishment.html', {'form': form,'title':title}) @login_required def edit_establishment(request, id): title = \"Editar Estabelecimento\" establishment =", "= json.loads(r.content) latitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lat\"] longitude = dados[\"results\"][0][\"geometry\"][\"location\"][\"lng\"] #print(latitude) #print(longitude) return render(request, 'mapa.html',", "from .forms import CommunityActionForm, DonationForm, HealthServiceForm, EstablishmentForm @login_required def create_health_service(request): title = \"Cadastrar", "render(request, 
'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm() return render(request, 'establishment.html', {'form': form,'title':title})", "render(request, 'establishment.html', {'form': form,'title':title}) else: form = VoluntaryServiceForm(instance=voluntary) return render(request, 'establishment.html', {'form': form,'title':title})", "= \"Cadastrar Doação\" if request.method == \"POST\": form = DonationForm(request.POST) if form.is_valid(): form.save()", "health_services = HealthService.objects.all() return render(request, 'list_health_service.html', {\"health_services\": health_services}) def list_donation(request): donations = Donation.objects.all()", "{'form': form,'title':title}) @login_required def edit_health_service(request, id): title = \"Editar Serviço de Saude\" health_service", "DonationForm, HealthServiceForm, EstablishmentForm , VoluntaryServiceForm from .models import Establishment, HealthService, Donation, VoluntaryService #", "return render(request, 'servicolist.html', {'estabelecimentos':estabelecimentos,'saude':saude,'doacao':doacao}) @login_required def create_establishment(request): title = \"Cadastrar Estabelecimento\" if request.method", "form,'title':title}) @login_required def delete_establishment(request, id): establishment = get_object_or_404(Establishment, id=id) establishment.delete() return render(request, 'delete.html')", "Serviço\" if request.method == \"POST\": form = HealthServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\"))", "id): delete_voluntary = get_object_or_404(VoluntaryService, id=id) delete_voluntary.delete() return render(request, 'delete.html') def list_establishment(request): establishments =", "request.POST['categoria'] # divulgacao.cidade = request.POST['cidade'] # #divulgacao.bairro = request.POST['bairro'] # divulgacao.endereco = request.POST['endereco']", "form,'title':title}) from .forms import CommunityActionForm, DonationForm, 
HealthServiceForm, EstablishmentForm @login_required def create_health_service(request): title =", "establishment = get_object_or_404(Establishment, id=id) if request.method == \"POST\": form = EstablishmentForm(request.POST, instance=product) if", "'establishment.html', {'form': form,'title':title}) else: form = EstablishmentForm() return render(request, 'establishment.html', {'form': form,'title':title}) @login_required", "render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm() return render(request, 'establishment.html', {'form': form,'title':title})", "# @login_required # def formevent(request): # if request.method == 'POST': # divulgacao =", "render(request, 'list_donation.html', {\"donations\": donations}) def list_voluntary(request): volunteers = VoluntaryService.objects.all() print(volunteers) return render(request, 'list_voluntary.html',", "= get_object_or_404(Donation, id=id) delete_donation.delete() return render(request, 'delete.html') @login_required def delete_voluntary(request, id): delete_voluntary =", "'index.html') def servicolist(request): estabelecimentos = Establishment.objects.all() saude = HealthService.objects.all() doacao = Donation.objects.all() return", "import MIMEText from django.shortcuts import get_object_or_404 from django.shortcuts import render from django.urls import", "form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = DonationForm() return", "form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title}) else: form = HealthServiceForm() return", "def create_donation(request): title = \"Cadastrar Doação\" if request.method == \"POST\": form = DonationForm(request.POST)", "EstablishmentForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 
'establishment.html', {'form': form,'title':title}) else: form", "form = VoluntaryServiceForm(request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse(\"index\")) return render(request, 'establishment.html', {'form': form,'title':title})", "HealthService, Donation, VoluntaryService # from .models import Divulgacoes import requests import urllib, json", "divulgacao.endereco = request.POST['endereco'] # #divulgacao.cep = request.POST['cep'] # divulgacao.telefone = request.POST['telefone'] # divulgacao.horarioInicio" ]
[ "%} ... {% endnavlink %} Generate navigation link for <view>. The navigation link", "same as the current page, the css class \"active\" will be added to", "= parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view, args) except ValueError: raise template.TemplateSyntaxError(\"%r tag requires", "2 arguments\" % token.contents.split()[0]) # required arguments tag_name = strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0))", "from django.core.urlresolvers import reverse register = template.Library() def strip_quotes(s): if s[0] == s[-1]", "\"\"\" usage: {% navlink <view> %} ... {% endnavlink %} Generate navigation link", "the current page, the css class \"active\" will be added to the <li>", "strip quotes else: return s @register.simple_tag(takes_context=True) def css_active(context, url): request = context['request'] if", "s.startswith(('\"', \"'\")): # is quoted string return s[1:-1] # strip quotes else: return", "nodelist, view, args): self.nodelist = nodelist self.view = view self.args = args def", "is quoted string return s[1:-1] # strip quotes else: return s @register.simple_tag(takes_context=True) def", "if s[0] == s[-1] and s.startswith(('\"', \"'\")): # is quoted string return s[1:-1]", "register = template.Library() def strip_quotes(s): if s[0] == s[-1] and s.startswith(('\"', \"'\")): #", "token.split_contents() if len(args) < 2: raise template.TemplateSyntaxError( \"%r tag requires at least 2", "s[0] == s[-1] and s.startswith(('\"', \"'\")): # is quoted string return s[1:-1] #", "least 2 arguments\" % token.contents.split()[0]) # required arguments tag_name = strip_quotes(args.pop(0)) view =", "token): \"\"\" usage: {% navlink <view> %} ... {% endnavlink %} Generate navigation", "a listitem (<li><a>...</a></li>). 
If the link is the same as the current page,", "__init__(self, nodelist, view, args): self.nodelist = nodelist self.view = view self.args = args", "# required arguments tag_name = strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args = [template.Variable(arg) for", "be added to the <li> tag (i.e. <li class=\"active\">...</li>). \"\"\" try: args =", "link is the same as the current page, the css class \"active\" will", "<view> %} ... {% endnavlink %} Generate navigation link for <view>. The navigation", "%} Generate navigation link for <view>. The navigation link is a listitem (<li><a>...</a></li>).", "do_navlink(parser, token): \"\"\" usage: {% navlink <view> %} ... {% endnavlink %} Generate", "parser.delete_first_token() return NavigationLink(nodelist, view, args) except ValueError: raise template.TemplateSyntaxError(\"%r tag requires a single", "return \"active\" return \"\" @register.filter(name='css_class') def css_class(value, arg): return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def", "class NavigationLink(template.Node): def __init__(self, nodelist, view, args): self.nodelist = nodelist self.view = view", "<li class=\"active\">...</li>). \"\"\" try: args = token.split_contents() if len(args) < 2: raise template.TemplateSyntaxError(", "will be added to the <li> tag (i.e. <li class=\"active\">...</li>). \"\"\" try: args", "endnavlink %} Generate navigation link for <view>. The navigation link is a listitem", "return s[1:-1] # strip quotes else: return s @register.simple_tag(takes_context=True) def css_active(context, url): request", "if request.path == url: return \"active\" return \"\" @register.filter(name='css_class') def css_class(value, arg): return", "(<li><a>...</a></li>). 
If the link is the same as the current page, the css", "quotes else: return s @register.simple_tag(takes_context=True) def css_active(context, url): request = context['request'] if request.path", "css class \"active\" will be added to the <li> tag (i.e. <li class=\"active\">...</li>).", "template.Library() def strip_quotes(s): if s[0] == s[-1] and s.startswith(('\"', \"'\")): # is quoted", "NavigationLink(template.Node): def __init__(self, nodelist, view, args): self.nodelist = nodelist self.view = view self.args", "as the current page, the css class \"active\" will be added to the", "def do_navlink(parser, token): \"\"\" usage: {% navlink <view> %} ... {% endnavlink %}", "template.TemplateSyntaxError(\"%r tag requires a single argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node): def __init__(self, nodelist,", "def strip_quotes(s): if s[0] == s[-1] and s.startswith(('\"', \"'\")): # is quoted string", "2: raise template.TemplateSyntaxError( \"%r tag requires at least 2 arguments\" % token.contents.split()[0]) #", "= [template.Variable(arg) for arg in args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view,", "return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def do_navlink(parser, token): \"\"\" usage: {% navlink <view> %}", "raise template.TemplateSyntaxError( \"%r tag requires at least 2 arguments\" % token.contents.split()[0]) # required", "arg in args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view, args) except ValueError:", "request = context['request'] if request.path == url: return \"active\" return \"\" @register.filter(name='css_class') def", "for arg in args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view, args) except", "tag requires a single argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node): def 
__init__(self, nodelist, view,", "import template from django.core.urlresolvers import reverse register = template.Library() def strip_quotes(s): if s[0]", "the link is the same as the current page, the css class \"active\"", "<view>. The navigation link is a listitem (<li><a>...</a></li>). If the link is the", "arg}) @register.tag(name='navlink') def do_navlink(parser, token): \"\"\" usage: {% navlink <view> %} ... {%", "<gh_stars>0 from django import template from django.core.urlresolvers import reverse register = template.Library() def", "The navigation link is a listitem (<li><a>...</a></li>). If the link is the same", "view, args): self.nodelist = nodelist self.view = view self.args = args def render(self,", "to the <li> tag (i.e. <li class=\"active\">...</li>). \"\"\" try: args = token.split_contents() if", "import reverse register = template.Library() def strip_quotes(s): if s[0] == s[-1] and s.startswith(('\"',", "tag (i.e. <li class=\"active\">...</li>). \"\"\" try: args = token.split_contents() if len(args) < 2:", "reverse register = template.Library() def strip_quotes(s): if s[0] == s[-1] and s.startswith(('\"', \"'\")):", "arguments\" % token.contents.split()[0]) # required arguments tag_name = strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args", "strip_quotes(s): if s[0] == s[-1] and s.startswith(('\"', \"'\")): # is quoted string return", "args) except ValueError: raise template.TemplateSyntaxError(\"%r tag requires a single argument\" % (tokens.contents.split()[0])) class", "reverse(self.view, args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % { 'url': url, 'active': css_active(context, url),", "class=\"active\">...</li>). 
\"\"\" try: args = token.split_contents() if len(args) < 2: raise template.TemplateSyntaxError( \"%r", "args = token.split_contents() if len(args) < 2: raise template.TemplateSyntaxError( \"%r tag requires at", "request.path == url: return \"active\" return \"\" @register.filter(name='css_class') def css_class(value, arg): return value.as_widget(attrs={'class':", "== url: return \"active\" return \"\" @register.filter(name='css_class') def css_class(value, arg): return value.as_widget(attrs={'class': arg})", "is a listitem (<li><a>...</a></li>). If the link is the same as the current", "return \"\" @register.filter(name='css_class') def css_class(value, arg): return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def do_navlink(parser, token):", "args = [template.Variable(arg) for arg in args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist,", "css_active(context, url): request = context['request'] if request.path == url: return \"active\" return \"\"", "= strip_quotes(args.pop(0)) args = [template.Variable(arg) for arg in args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token()", "ValueError: raise template.TemplateSyntaxError(\"%r tag requires a single argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node): def", "arg in self.args] url = reverse(self.view, args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % {", "django import template from django.core.urlresolvers import reverse register = template.Library() def strip_quotes(s): if", "... {% endnavlink %} Generate navigation link for <view>. 
The navigation link is", "= nodelist self.view = view self.args = args def render(self, context): args =", "if len(args) < 2: raise template.TemplateSyntaxError( \"%r tag requires at least 2 arguments\"", "def css_class(value, arg): return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def do_navlink(parser, token): \"\"\" usage: {%", "view = strip_quotes(args.pop(0)) args = [template.Variable(arg) for arg in args] nodelist = parser.parse(('endnavlink',))", "{% navlink <view> %} ... {% endnavlink %} Generate navigation link for <view>.", "self.view = view self.args = args def render(self, context): args = [arg.resolve(context) for", "\"\"\" try: args = token.split_contents() if len(args) < 2: raise template.TemplateSyntaxError( \"%r tag", "context): args = [arg.resolve(context) for arg in self.args] url = reverse(self.view, args=args) return", "= reverse(self.view, args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % { 'url': url, 'active': css_active(context,", "args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % { 'url': url, 'active': css_active(context, url), 'content':", "@register.tag(name='navlink') def do_navlink(parser, token): \"\"\" usage: {% navlink <view> %} ... {% endnavlink", "== s[-1] and s.startswith(('\"', \"'\")): # is quoted string return s[1:-1] # strip", "(tokens.contents.split()[0])) class NavigationLink(template.Node): def __init__(self, nodelist, view, args): self.nodelist = nodelist self.view =", "Generate navigation link for <view>. The navigation link is a listitem (<li><a>...</a></li>). 
If", "= token.split_contents() if len(args) < 2: raise template.TemplateSyntaxError( \"%r tag requires at least", "required arguments tag_name = strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args = [template.Variable(arg) for arg", "self.args] url = reverse(self.view, args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % { 'url': url,", "view, args) except ValueError: raise template.TemplateSyntaxError(\"%r tag requires a single argument\" % (tokens.contents.split()[0]))", "'<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % { 'url': url, 'active': css_active(context, url), 'content': self.nodelist.render(context), }", "navlink <view> %} ... {% endnavlink %} Generate navigation link for <view>. The", "If the link is the same as the current page, the css class", "NavigationLink(nodelist, view, args) except ValueError: raise template.TemplateSyntaxError(\"%r tag requires a single argument\" %", "args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view, args) except ValueError: raise template.TemplateSyntaxError(\"%r", "template.TemplateSyntaxError( \"%r tag requires at least 2 arguments\" % token.contents.split()[0]) # required arguments", "current page, the css class \"active\" will be added to the <li> tag", "= strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args = [template.Variable(arg) for arg in args] nodelist", "return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % { 'url': url, 'active': css_active(context, url), 'content': self.nodelist.render(context),", "from django import template from django.core.urlresolvers import reverse register = template.Library() def strip_quotes(s):", "token.contents.split()[0]) # required arguments tag_name = strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args = [template.Variable(arg)", "arg): return value.as_widget(attrs={'class': arg}) 
@register.tag(name='navlink') def do_navlink(parser, token): \"\"\" usage: {% navlink <view>", "= args def render(self, context): args = [arg.resolve(context) for arg in self.args] url", "url = reverse(self.view, args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % { 'url': url, 'active':", "usage: {% navlink <view> %} ... {% endnavlink %} Generate navigation link for", "listitem (<li><a>...</a></li>). If the link is the same as the current page, the", "in self.args] url = reverse(self.view, args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' % { 'url':", "link is a listitem (<li><a>...</a></li>). If the link is the same as the", "navigation link is a listitem (<li><a>...</a></li>). If the link is the same as", "nodelist self.view = view self.args = args def render(self, context): args = [arg.resolve(context)", "link for <view>. The navigation link is a listitem (<li><a>...</a></li>). If the link", "page, the css class \"active\" will be added to the <li> tag (i.e.", "# is quoted string return s[1:-1] # strip quotes else: return s @register.simple_tag(takes_context=True)", "{% endnavlink %} Generate navigation link for <view>. The navigation link is a", "\"active\" will be added to the <li> tag (i.e. <li class=\"active\">...</li>). \"\"\" try:", "\"\" @register.filter(name='css_class') def css_class(value, arg): return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def do_navlink(parser, token): \"\"\"", "< 2: raise template.TemplateSyntaxError( \"%r tag requires at least 2 arguments\" % token.contents.split()[0])", "render(self, context): args = [arg.resolve(context) for arg in self.args] url = reverse(self.view, args=args)", "class \"active\" will be added to the <li> tag (i.e. <li class=\"active\">...</li>). 
\"\"\"", "= view self.args = args def render(self, context): args = [arg.resolve(context) for arg", "strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args = [template.Variable(arg) for arg in args] nodelist =", "a single argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node): def __init__(self, nodelist, view, args): self.nodelist", "= [arg.resolve(context) for arg in self.args] url = reverse(self.view, args=args) return '<li class=\"%(active)s\"><a", "= template.Library() def strip_quotes(s): if s[0] == s[-1] and s.startswith(('\"', \"'\")): # is", "single argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node): def __init__(self, nodelist, view, args): self.nodelist =", "context['request'] if request.path == url: return \"active\" return \"\" @register.filter(name='css_class') def css_class(value, arg):", "args def render(self, context): args = [arg.resolve(context) for arg in self.args] url =", "requires at least 2 arguments\" % token.contents.split()[0]) # required arguments tag_name = strip_quotes(args.pop(0))", "self.nodelist = nodelist self.view = view self.args = args def render(self, context): args", "added to the <li> tag (i.e. <li class=\"active\">...</li>). 
\"\"\" try: args = token.split_contents()", "template from django.core.urlresolvers import reverse register = template.Library() def strip_quotes(s): if s[0] ==", "css_class(value, arg): return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def do_navlink(parser, token): \"\"\" usage: {% navlink", "except ValueError: raise template.TemplateSyntaxError(\"%r tag requires a single argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node):", "raise template.TemplateSyntaxError(\"%r tag requires a single argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node): def __init__(self,", "view self.args = args def render(self, context): args = [arg.resolve(context) for arg in", "args = [arg.resolve(context) for arg in self.args] url = reverse(self.view, args=args) return '<li", "(i.e. <li class=\"active\">...</li>). \"\"\" try: args = token.split_contents() if len(args) < 2: raise", "def __init__(self, nodelist, view, args): self.nodelist = nodelist self.view = view self.args =", "% (tokens.contents.split()[0])) class NavigationLink(template.Node): def __init__(self, nodelist, view, args): self.nodelist = nodelist self.view", "return s @register.simple_tag(takes_context=True) def css_active(context, url): request = context['request'] if request.path == url:", "argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node): def __init__(self, nodelist, view, args): self.nodelist = nodelist", "and s.startswith(('\"', \"'\")): # is quoted string return s[1:-1] # strip quotes else:", "requires a single argument\" % (tokens.contents.split()[0])) class NavigationLink(template.Node): def __init__(self, nodelist, view, args):", "string return s[1:-1] # strip quotes else: return s @register.simple_tag(takes_context=True) def css_active(context, url):", "tag_name = strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args = [template.Variable(arg) for arg in args]", "s[1:-1] # strip quotes else: 
return s @register.simple_tag(takes_context=True) def css_active(context, url): request =", "else: return s @register.simple_tag(takes_context=True) def css_active(context, url): request = context['request'] if request.path ==", "args): self.nodelist = nodelist self.view = view self.args = args def render(self, context):", "for arg in self.args] url = reverse(self.view, args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>' %", "strip_quotes(args.pop(0)) args = [template.Variable(arg) for arg in args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return", "[arg.resolve(context) for arg in self.args] url = reverse(self.view, args=args) return '<li class=\"%(active)s\"><a href=\"%(url)s\">%(content)s</a></li>'", "self.args = args def render(self, context): args = [arg.resolve(context) for arg in self.args]", "= context['request'] if request.path == url: return \"active\" return \"\" @register.filter(name='css_class') def css_class(value,", "value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def do_navlink(parser, token): \"\"\" usage: {% navlink <view> %} ...", "at least 2 arguments\" % token.contents.split()[0]) # required arguments tag_name = strip_quotes(args.pop(0)) view", "the same as the current page, the css class \"active\" will be added", "url): request = context['request'] if request.path == url: return \"active\" return \"\" @register.filter(name='css_class')", "quoted string return s[1:-1] # strip quotes else: return s @register.simple_tag(takes_context=True) def css_active(context,", "arguments tag_name = strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args = [template.Variable(arg) for arg in", "navigation link for <view>. The navigation link is a listitem (<li><a>...</a></li>). If the", "for <view>. The navigation link is a listitem (<li><a>...</a></li>). 
If the link is", "[template.Variable(arg) for arg in args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view, args)", "parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view, args) except ValueError: raise template.TemplateSyntaxError(\"%r tag requires a", "s @register.simple_tag(takes_context=True) def css_active(context, url): request = context['request'] if request.path == url: return", "\"'\")): # is quoted string return s[1:-1] # strip quotes else: return s", "# strip quotes else: return s @register.simple_tag(takes_context=True) def css_active(context, url): request = context['request']", "try: args = token.split_contents() if len(args) < 2: raise template.TemplateSyntaxError( \"%r tag requires", "len(args) < 2: raise template.TemplateSyntaxError( \"%r tag requires at least 2 arguments\" %", "nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view, args) except ValueError: raise template.TemplateSyntaxError(\"%r tag", "the <li> tag (i.e. <li class=\"active\">...</li>). 
\"\"\" try: args = token.split_contents() if len(args)", "def css_active(context, url): request = context['request'] if request.path == url: return \"active\" return", "% token.contents.split()[0]) # required arguments tag_name = strip_quotes(args.pop(0)) view = strip_quotes(args.pop(0)) args =", "@register.simple_tag(takes_context=True) def css_active(context, url): request = context['request'] if request.path == url: return \"active\"", "\"%r tag requires at least 2 arguments\" % token.contents.split()[0]) # required arguments tag_name", "s[-1] and s.startswith(('\"', \"'\")): # is quoted string return s[1:-1] # strip quotes", "url: return \"active\" return \"\" @register.filter(name='css_class') def css_class(value, arg): return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink')", "\"active\" return \"\" @register.filter(name='css_class') def css_class(value, arg): return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def do_navlink(parser,", "the css class \"active\" will be added to the <li> tag (i.e. <li", "<li> tag (i.e. <li class=\"active\">...</li>). 
\"\"\" try: args = token.split_contents() if len(args) <", "return NavigationLink(nodelist, view, args) except ValueError: raise template.TemplateSyntaxError(\"%r tag requires a single argument\"", "def render(self, context): args = [arg.resolve(context) for arg in self.args] url = reverse(self.view,", "@register.filter(name='css_class') def css_class(value, arg): return value.as_widget(attrs={'class': arg}) @register.tag(name='navlink') def do_navlink(parser, token): \"\"\" usage:", "is the same as the current page, the css class \"active\" will be", "django.core.urlresolvers import reverse register = template.Library() def strip_quotes(s): if s[0] == s[-1] and", "in args] nodelist = parser.parse(('endnavlink',)) parser.delete_first_token() return NavigationLink(nodelist, view, args) except ValueError: raise", "tag requires at least 2 arguments\" % token.contents.split()[0]) # required arguments tag_name =" ]
[ "Exception as e: print(\"Error opening file \"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict): print(\"Error loading", "\"Failed to create a merge process.\" print(msg) raise ex elif args.funcname == \"repack\":", "func_args={} try: with open(args.funcargs) as json_file: json_data = json.load(json_file) except Exception as e:", "to create a repack process.\" print(msg) raise ex else: try: from Configuration.DataProcessing.GetScenario import", "\"MergedError\" try: from Configuration.DataProcessing.Merge import mergeProcess process = mergeProcess(**func_args) except Exception as ex:", "\" msg += str(scenario) msg += \"\\nWith Error:\" msg += str(ex) print(msg) raise", "str(ex) print(msg) raise ex try: process = getattr(scenarioInst, args.funcname)(**func_args) except Exception as ex:", "func_args = json_data process=create_process(args, func_args) with open(args.output_pkl, \"wb\") as output_file: if output_file.closed: print(\"Error", "func_args['outputmod_label'] = \"MergedError\" try: from Configuration.DataProcessing.Merge import mergeProcess process = mergeProcess(**func_args) except Exception", "it from this package instead import archived_argparse as argparse import sys, re, os", "as output_file: if output_file.closed: print(\"Error loading pickle input \"+args.output_pkl[i]) sys.exit(1) pickle.dump(process, output_file, protocol=0)", "except Exception as ex: msg = \"Failed to retrieve the Scenario named \"", "if args.funcname == \"merge\": if not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try: from Configuration.DataProcessing.Merge", "+= \"\\nWith Error:\" msg += str(ex) print(msg) raise ex try: process = getattr(scenarioInst,", "as ex: msg = \"Failed to create a merge process.\" print(msg) raise ex", "print(\"Error loading dictionary \"+args.funcargs) sys.exit(1) func_args = json_data process=create_process(args, func_args) with open(args.output_pkl, \"wb\")", "re, os import json from tweak_program_helpers import 
make_parser def create_process(args,func_args): if args.funcname ==", "Configuration.DataProcessing.Repack import repackProcess process = repackProcess(**func_args) except Exception as ex: msg = \"Failed", "not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try: from Configuration.DataProcessing.Merge import mergeProcess process = mergeProcess(**func_args)", "= repackProcess(**func_args) except Exception as ex: msg = \"Failed to create a repack", "import make_parser def create_process(args,func_args): if args.funcname == \"merge\": if not args.useErrorDataset: func_args['outputmod_label'] =", "as ex: msg = \"Failed to load process from Scenario %s (%s).\" %", "json_file: json_data = json.load(json_file) except Exception as e: print(\"Error opening file \"+args.funcargs) sys.exit(1)", "create a merge process.\" print(msg) raise ex elif args.funcname == \"repack\": try: from", "str(scenario) msg += \"\\nWith Error:\" msg += str(ex) print(msg) raise ex try: process", "args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try: from Configuration.DataProcessing.Merge import mergeProcess process = mergeProcess(**func_args) except", "= mergeProcess(**func_args) except Exception as ex: msg = \"Failed to create a merge", "archived_argparse as argparse import sys, re, os import json from tweak_program_helpers import make_parser", "parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl', required=True) return parser def main(): parser =", "%s (%s).\" % (scenario, scenarioInst) print(msg) raise ex return process def init_argparse(): parser", "process.\" print(msg) raise ex else: try: from Configuration.DataProcessing.GetScenario import getScenario scenarioInst = getScenario(scenario)", "mergeProcess process = mergeProcess(**func_args) except Exception as ex: msg = \"Failed to create", "process = getattr(scenarioInst, 
args.funcname)(**func_args) except Exception as ex: msg = \"Failed to load", "init_argparse() args = parser.parse_args() func_args={} try: with open(args.funcargs) as json_file: json_data = json.load(json_file)", "= json.load(json_file) except Exception as e: print(\"Error opening file \"+args.funcargs) sys.exit(1) if not", "scenarioInst = getScenario(scenario) except Exception as ex: msg = \"Failed to retrieve the", "create a repack process.\" print(msg) raise ex else: try: from Configuration.DataProcessing.GetScenario import getScenario", "required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl', required=True) return parser def main(): parser = init_argparse()", "with open(args.funcargs) as json_file: json_data = json.load(json_file) except Exception as e: print(\"Error opening", "try: from Configuration.DataProcessing.Repack import repackProcess process = repackProcess(**func_args) except Exception as ex: msg", "to create a merge process.\" print(msg) raise ex elif args.funcname == \"repack\": try:", "return process def init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator (merge,", "create_process(args,func_args): if args.funcname == \"merge\": if not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try: from", "parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl', required=True) return parser def main(): parser = init_argparse() args", "elif args.funcname == \"repack\": try: from Configuration.DataProcessing.Repack import repackProcess process = repackProcess(**func_args) except", "= json_data process=create_process(args, func_args) with open(args.output_pkl, \"wb\") as output_file: if output_file.closed: print(\"Error loading", "msg = \"Failed to create a repack process.\" print(msg) raise ex else: try:", "import pickle try: import argparse 
except ImportError: #get it from this package instead", "msg += str(scenario) msg += \"\\nWith Error:\" msg += str(ex) print(msg) raise ex", "= init_argparse() args = parser.parse_args() func_args={} try: with open(args.funcargs) as json_file: json_data =", "from Configuration.DataProcessing.Repack import repackProcess process = repackProcess(**func_args) except Exception as ex: msg =", "ex return process def init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator", "func_args) with open(args.output_pkl, \"wb\") as output_file: if output_file.closed: print(\"Error loading pickle input \"+args.output_pkl[i])", "as json_file: json_data = json.load(json_file) except Exception as e: print(\"Error opening file \"+args.funcargs)", "#!/usr/bin/env python import FWCore.ParameterSet.Config as cms import pickle try: import argparse except ImportError:", "json_data = json.load(json_file) except Exception as e: print(\"Error opening file \"+args.funcargs) sys.exit(1) if", "msg += \"\\nWith Error:\" msg += str(ex) print(msg) raise ex try: process =", "creator (merge, DataProcessing etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl',", "repack process.\" print(msg) raise ex else: try: from Configuration.DataProcessing.GetScenario import getScenario scenarioInst =", "Configuration.DataProcessing.GetScenario import getScenario scenarioInst = getScenario(scenario) except Exception as ex: msg = \"Failed", "try: from Configuration.DataProcessing.GetScenario import getScenario scenarioInst = getScenario(scenario) except Exception as ex: msg", "etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) 
parser.add_argument('--output_pkl', required=True) return parser", "json from tweak_program_helpers import make_parser def create_process(args,func_args): if args.funcname == \"merge\": if not", "ex: msg = \"Failed to create a repack process.\" print(msg) raise ex else:", "json.load(json_file) except Exception as e: print(\"Error opening file \"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict):", "parser def main(): parser = init_argparse() args = parser.parse_args() func_args={} try: with open(args.funcargs)", "= \"MergedError\" try: from Configuration.DataProcessing.Merge import mergeProcess process = mergeProcess(**func_args) except Exception as", "= getScenario(scenario) except Exception as ex: msg = \"Failed to retrieve the Scenario", "from this package instead import archived_argparse as argparse import sys, re, os import", "= \"Failed to retrieve the Scenario named \" msg += str(scenario) msg +=", "json_data process=create_process(args, func_args) with open(args.output_pkl, \"wb\") as output_file: if output_file.closed: print(\"Error loading pickle", "getScenario scenarioInst = getScenario(scenario) except Exception as ex: msg = \"Failed to retrieve", "print(msg) raise ex return process def init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\",", "msg = \"Failed to create a merge process.\" print(msg) raise ex elif args.funcname", "try: with open(args.funcargs) as json_file: json_data = json.load(json_file) except Exception as e: print(\"Error", "from tweak_program_helpers import make_parser def create_process(args,func_args): if args.funcname == \"merge\": if not args.useErrorDataset:", "= \"Failed to create a repack process.\" print(msg) raise ex else: try: from", "DataProcessing etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl', 
required=True) return", "return parser def main(): parser = init_argparse() args = parser.parse_args() func_args={} try: with", "msg = \"Failed to load process from Scenario %s (%s).\" % (scenario, scenarioInst)", "if not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try: from Configuration.DataProcessing.Merge import mergeProcess process =", "import json from tweak_program_helpers import make_parser def create_process(args,func_args): if args.funcname == \"merge\": if", "make_parser def create_process(args,func_args): if args.funcname == \"merge\": if not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\"", "retrieve the Scenario named \" msg += str(scenario) msg += \"\\nWith Error:\" msg", "except ImportError: #get it from this package instead import archived_argparse as argparse import", "to retrieve the Scenario named \" msg += str(scenario) msg += \"\\nWith Error:\"", "parser.add_argument('--output_pkl', required=True) return parser def main(): parser = init_argparse() args = parser.parse_args() func_args={}", "ex: msg = \"Failed to load process from Scenario %s (%s).\" % (scenario,", "getattr(scenarioInst, args.funcname)(**func_args) except Exception as ex: msg = \"Failed to load process from", "a merge process.\" print(msg) raise ex elif args.funcname == \"repack\": try: from Configuration.DataProcessing.Repack", "except Exception as ex: msg = \"Failed to create a repack process.\" print(msg)", "open(args.funcargs) as json_file: json_data = json.load(json_file) except Exception as e: print(\"Error opening file", "print(msg) raise ex try: process = getattr(scenarioInst, args.funcname)(**func_args) except Exception as ex: msg", "to load process from Scenario %s (%s).\" % (scenario, scenarioInst) print(msg) raise ex", "except Exception as ex: msg = \"Failed to create a merge process.\" print(msg)", "isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs) sys.exit(1) func_args = json_data 
process=create_process(args, func_args) with open(args.output_pkl,", "required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl', required=True) return parser def main(): parser", "from Scenario %s (%s).\" % (scenario, scenarioInst) print(msg) raise ex return process def", "def init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator (merge, DataProcessing etc)\"", "\"repack\": try: from Configuration.DataProcessing.Repack import repackProcess process = repackProcess(**func_args) except Exception as ex:", "#get it from this package instead import archived_argparse as argparse import sys, re,", "os import json from tweak_program_helpers import make_parser def create_process(args,func_args): if args.funcname == \"merge\":", "ex: msg = \"Failed to create a merge process.\" print(msg) raise ex elif", "parser = init_argparse() args = parser.parse_args() func_args={} try: with open(args.funcargs) as json_file: json_data", "\"Failed to load process from Scenario %s (%s).\" % (scenario, scenarioInst) print(msg) raise", "usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator (merge, DataProcessing etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True)", "sys, re, os import json from tweak_program_helpers import make_parser def create_process(args,func_args): if args.funcname", "Exception as ex: msg = \"Failed to create a repack process.\" print(msg) raise", "as ex: msg = \"Failed to retrieve the Scenario named \" msg +=", "print(\"Error opening file \"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs) sys.exit(1)", "cms import pickle try: import argparse except ImportError: #get it from this package", "ImportError: #get it from this package instead import 
archived_argparse as argparse import sys,", "dictionary \"+args.funcargs) sys.exit(1) func_args = json_data process=create_process(args, func_args) with open(args.output_pkl, \"wb\") as output_file:", "FWCore.ParameterSet.Config as cms import pickle try: import argparse except ImportError: #get it from", "import FWCore.ParameterSet.Config as cms import pickle try: import argparse except ImportError: #get it", "= \"Failed to load process from Scenario %s (%s).\" % (scenario, scenarioInst) print(msg)", "import archived_argparse as argparse import sys, re, os import json from tweak_program_helpers import", "process.\" print(msg) raise ex elif args.funcname == \"repack\": try: from Configuration.DataProcessing.Repack import repackProcess", "<filename>bin/cmssw_wm_create_process.py<gh_stars>0 #!/usr/bin/env python import FWCore.ParameterSet.Config as cms import pickle try: import argparse except", "Exception as ex: msg = \"Failed to retrieve the Scenario named \" msg", "except Exception as e: print(\"Error opening file \"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict): print(\"Error", "argparse import sys, re, os import json from tweak_program_helpers import make_parser def create_process(args,func_args):", "sys.exit(1) func_args = json_data process=create_process(args, func_args) with open(args.output_pkl, \"wb\") as output_file: if output_file.closed:", "process = mergeProcess(**func_args) except Exception as ex: msg = \"Failed to create a", "(%s).\" % (scenario, scenarioInst) print(msg) raise ex return process def init_argparse(): parser =", "import getScenario scenarioInst = getScenario(scenario) except Exception as ex: msg = \"Failed to", "init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator (merge, DataProcessing etc)\" )", "description=\"Process creator (merge, DataProcessing etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) 
parser.add_argument('--useErrorDataset', action=\"store_true\", required=False)", "repackProcess(**func_args) except Exception as ex: msg = \"Failed to create a repack process.\"", "== \"merge\": if not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try: from Configuration.DataProcessing.Merge import mergeProcess", "pickle try: import argparse except ImportError: #get it from this package instead import", "raise ex try: process = getattr(scenarioInst, args.funcname)(**func_args) except Exception as ex: msg =", "= \"Failed to create a merge process.\" print(msg) raise ex elif args.funcname ==", "merge process.\" print(msg) raise ex elif args.funcname == \"repack\": try: from Configuration.DataProcessing.Repack import", "argparse except ImportError: #get it from this package instead import archived_argparse as argparse", "Exception as ex: msg = \"Failed to create a merge process.\" print(msg) raise", "+= str(scenario) msg += \"\\nWith Error:\" msg += str(ex) print(msg) raise ex try:", "% (scenario, scenarioInst) print(msg) raise ex return process def init_argparse(): parser = argparse.ArgumentParser(", "print(msg) raise ex else: try: from Configuration.DataProcessing.GetScenario import getScenario scenarioInst = getScenario(scenario) except", "\"+args.funcargs) sys.exit(1) func_args = json_data process=create_process(args, func_args) with open(args.output_pkl, \"wb\") as output_file: if", "python import FWCore.ParameterSet.Config as cms import pickle try: import argparse except ImportError: #get", "raise ex else: try: from Configuration.DataProcessing.GetScenario import getScenario scenarioInst = getScenario(scenario) except Exception", "else: try: from Configuration.DataProcessing.GetScenario import getScenario scenarioInst = getScenario(scenario) except Exception as ex:", "as ex: msg = \"Failed to create a repack process.\" print(msg) raise ex", "Scenario %s (%s).\" % (scenario, scenarioInst) print(msg) raise ex return process def 
init_argparse():", "msg += str(ex) print(msg) raise ex try: process = getattr(scenarioInst, args.funcname)(**func_args) except Exception", "= argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator (merge, DataProcessing etc)\" ) parser.add_argument('--funcname', required=True)", "(merge, DataProcessing etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl', required=True)", "package instead import archived_argparse as argparse import sys, re, os import json from", "scenarioInst) print(msg) raise ex return process def init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION]", "args.funcname)(**func_args) except Exception as ex: msg = \"Failed to load process from Scenario", "the Scenario named \" msg += str(scenario) msg += \"\\nWith Error:\" msg +=", "\"\\nWith Error:\" msg += str(ex) print(msg) raise ex try: process = getattr(scenarioInst, args.funcname)(**func_args)", "this package instead import archived_argparse as argparse import sys, re, os import json", "try: process = getattr(scenarioInst, args.funcname)(**func_args) except Exception as ex: msg = \"Failed to", "e: print(\"Error opening file \"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs)", "if not isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs) sys.exit(1) func_args = json_data process=create_process(args, func_args)", "import mergeProcess process = mergeProcess(**func_args) except Exception as ex: msg = \"Failed to", "ex else: try: from Configuration.DataProcessing.GetScenario import getScenario scenarioInst = getScenario(scenario) except Exception as", "process def init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator 
(merge, DataProcessing", "sys.exit(1) if not isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs) sys.exit(1) func_args = json_data process=create_process(args,", "parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator (merge, DataProcessing etc)\" ) parser.add_argument('--funcname',", "required=False) parser.add_argument('--output_pkl', required=True) return parser def main(): parser = init_argparse() args = parser.parse_args()", "process = repackProcess(**func_args) except Exception as ex: msg = \"Failed to create a", "\"Failed to retrieve the Scenario named \" msg += str(scenario) msg += \"\\nWith", "getScenario(scenario) except Exception as ex: msg = \"Failed to retrieve the Scenario named", "except Exception as ex: msg = \"Failed to load process from Scenario %s", "msg = \"Failed to retrieve the Scenario named \" msg += str(scenario) msg", "def main(): parser = init_argparse() args = parser.parse_args() func_args={} try: with open(args.funcargs) as", "opening file \"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs) sys.exit(1) func_args", "ex: msg = \"Failed to retrieve the Scenario named \" msg += str(scenario)", "as cms import pickle try: import argparse except ImportError: #get it from this", "mergeProcess(**func_args) except Exception as ex: msg = \"Failed to create a merge process.\"", "with open(args.output_pkl, \"wb\") as output_file: if output_file.closed: print(\"Error loading pickle input \"+args.output_pkl[i]) sys.exit(1)", "\"wb\") as output_file: if output_file.closed: print(\"Error loading pickle input \"+args.output_pkl[i]) sys.exit(1) pickle.dump(process, output_file,", "\"merge\": if not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try: from Configuration.DataProcessing.Merge import mergeProcess process", "== \"repack\": try: from Configuration.DataProcessing.Repack import 
repackProcess process = repackProcess(**func_args) except Exception as", "+= str(ex) print(msg) raise ex try: process = getattr(scenarioInst, args.funcname)(**func_args) except Exception as", "as e: print(\"Error opening file \"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict): print(\"Error loading dictionary", "from Configuration.DataProcessing.GetScenario import getScenario scenarioInst = getScenario(scenario) except Exception as ex: msg =", "required=True) return parser def main(): parser = init_argparse() args = parser.parse_args() func_args={} try:", "args.funcname == \"repack\": try: from Configuration.DataProcessing.Repack import repackProcess process = repackProcess(**func_args) except Exception", ") parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl', required=True) return parser def", "argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process creator (merge, DataProcessing etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs',", "ex elif args.funcname == \"repack\": try: from Configuration.DataProcessing.Repack import repackProcess process = repackProcess(**func_args)", "args = parser.parse_args() func_args={} try: with open(args.funcargs) as json_file: json_data = json.load(json_file) except", "not isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs) sys.exit(1) func_args = json_data process=create_process(args, func_args) with", "(scenario, scenarioInst) print(msg) raise ex return process def init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s", "\"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs) sys.exit(1) func_args = json_data", "loading dictionary \"+args.funcargs) sys.exit(1) func_args = json_data 
process=create_process(args, func_args) with open(args.output_pkl, \"wb\") as", "args.funcname == \"merge\": if not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try: from Configuration.DataProcessing.Merge import", "parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\", required=False) parser.add_argument('--output_pkl', required=True) return parser def main():", "output_file: if output_file.closed: print(\"Error loading pickle input \"+args.output_pkl[i]) sys.exit(1) pickle.dump(process, output_file, protocol=0) main()", "import repackProcess process = repackProcess(**func_args) except Exception as ex: msg = \"Failed to", "Configuration.DataProcessing.Merge import mergeProcess process = mergeProcess(**func_args) except Exception as ex: msg = \"Failed", "repackProcess process = repackProcess(**func_args) except Exception as ex: msg = \"Failed to create", "\"Failed to create a repack process.\" print(msg) raise ex else: try: from Configuration.DataProcessing.GetScenario", "named \" msg += str(scenario) msg += \"\\nWith Error:\" msg += str(ex) print(msg)", "Scenario named \" msg += str(scenario) msg += \"\\nWith Error:\" msg += str(ex)", "Error:\" msg += str(ex) print(msg) raise ex try: process = getattr(scenarioInst, args.funcname)(**func_args) except", "a repack process.\" print(msg) raise ex else: try: from Configuration.DataProcessing.GetScenario import getScenario scenarioInst", "load process from Scenario %s (%s).\" % (scenario, scenarioInst) print(msg) raise ex return", "open(args.output_pkl, \"wb\") as output_file: if output_file.closed: print(\"Error loading pickle input \"+args.output_pkl[i]) sys.exit(1) pickle.dump(process,", "instead import archived_argparse as argparse import sys, re, os import json from tweak_program_helpers", "try: from Configuration.DataProcessing.Merge import mergeProcess process = 
mergeProcess(**func_args) except Exception as ex: msg", "as argparse import sys, re, os import json from tweak_program_helpers import make_parser def", "[FILE]...\", description=\"Process creator (merge, DataProcessing etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset', action=\"store_true\",", "from Configuration.DataProcessing.Merge import mergeProcess process = mergeProcess(**func_args) except Exception as ex: msg =", "process from Scenario %s (%s).\" % (scenario, scenarioInst) print(msg) raise ex return process", "= getattr(scenarioInst, args.funcname)(**func_args) except Exception as ex: msg = \"Failed to load process", "Exception as ex: msg = \"Failed to load process from Scenario %s (%s).\"", "import argparse except ImportError: #get it from this package instead import archived_argparse as", "process=create_process(args, func_args) with open(args.output_pkl, \"wb\") as output_file: if output_file.closed: print(\"Error loading pickle input", "tweak_program_helpers import make_parser def create_process(args,func_args): if args.funcname == \"merge\": if not args.useErrorDataset: func_args['outputmod_label']", "def create_process(args,func_args): if args.funcname == \"merge\": if not args.useErrorDataset: func_args['outputmod_label'] = \"MergedError\" try:", "[OPTION] [FILE]...\", description=\"Process creator (merge, DataProcessing etc)\" ) parser.add_argument('--funcname', required=True) parser.add_argument('--funcargs', required=True) parser.add_argument('--useErrorDataset',", "parser.parse_args() func_args={} try: with open(args.funcargs) as json_file: json_data = json.load(json_file) except Exception as", "main(): parser = init_argparse() args = parser.parse_args() func_args={} try: with open(args.funcargs) as json_file:", "raise ex elif args.funcname == \"repack\": try: from Configuration.DataProcessing.Repack import repackProcess process =", "raise ex return 
process def init_argparse(): parser = argparse.ArgumentParser( usage=\"%(prog)s [OPTION] [FILE]...\", description=\"Process", "file \"+args.funcargs) sys.exit(1) if not isinstance(json_data,dict): print(\"Error loading dictionary \"+args.funcargs) sys.exit(1) func_args =", "import sys, re, os import json from tweak_program_helpers import make_parser def create_process(args,func_args): if", "ex try: process = getattr(scenarioInst, args.funcname)(**func_args) except Exception as ex: msg = \"Failed", "action=\"store_true\", required=False) parser.add_argument('--output_pkl', required=True) return parser def main(): parser = init_argparse() args =", "try: import argparse except ImportError: #get it from this package instead import archived_argparse", "print(msg) raise ex elif args.funcname == \"repack\": try: from Configuration.DataProcessing.Repack import repackProcess process", "= parser.parse_args() func_args={} try: with open(args.funcargs) as json_file: json_data = json.load(json_file) except Exception" ]
[]
[ "class TestImagineFilterInterface(unittest.TestCase): interface = None def setUp(self): self.interface = ImagineFilterInterface() def test_not_implemented_apply_method(self): with", "flask.ext.imagine.filters.interface import ImagineFilterInterface class TestImagineFilterInterface(unittest.TestCase): interface = None def setUp(self): self.interface = ImagineFilterInterface()", "interface = None def setUp(self): self.interface = ImagineFilterInterface() def test_not_implemented_apply_method(self): with self.assertRaises(NotImplementedError): self.interface.apply('')", "ImagineFilterInterface class TestImagineFilterInterface(unittest.TestCase): interface = None def setUp(self): self.interface = ImagineFilterInterface() def test_not_implemented_apply_method(self):", "from flask.ext.imagine.filters.interface import ImagineFilterInterface class TestImagineFilterInterface(unittest.TestCase): interface = None def setUp(self): self.interface =", "import unittest from flask.ext.imagine.filters.interface import ImagineFilterInterface class TestImagineFilterInterface(unittest.TestCase): interface = None def setUp(self):", "TestImagineFilterInterface(unittest.TestCase): interface = None def setUp(self): self.interface = ImagineFilterInterface() def test_not_implemented_apply_method(self): with self.assertRaises(NotImplementedError):", "unittest from flask.ext.imagine.filters.interface import ImagineFilterInterface class TestImagineFilterInterface(unittest.TestCase): interface = None def setUp(self): self.interface", "import ImagineFilterInterface class TestImagineFilterInterface(unittest.TestCase): interface = None def setUp(self): self.interface = ImagineFilterInterface() def" ]
[ "id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone() user = translate_user(data) if", "= db def save_game(self, game): cursor = self.__db.connection.cursor() if game.id: cursor.execute( SQL_UPDATE_GAME, (game.name,", "models import Game, User SQL_DELETE_GAME = \"delete from game where id = %s\"", "db): self.__db = db def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data", "= self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone() user = translate_user(data) if data else", "name, category, console from game where id = %s\" SQL_USER_BY_ID = \"SELECT id,", "game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id = cursor.lastrowid self.__db.connection.commit() return", "def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return list(map(create_game_with_tuple, games)) def translate_user(user_tuple): return", "= db def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone()", "id, name, password from user where id = %s\" SQL_UPDATE_GAME = \"UPDATE game", "self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self,", "search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone() user = translate_user(data)", "return user def translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return list(map(create_game_with_tuple,", "id = %s\" SQL_USER_BY_ID = \"SELECT id, name, password from user where id", "= translate_games(cursor.fetchall()) return games def 
search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple", "game SET name=%s, category=%s, console=%s where id = %s\" SQL_SEARCH_GAMES = \"SELECT id,", "cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return games def search_by_id(self, id): cursor", "search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone() return Game(game_tuple[1], game_tuple[2],", "(id,)) self.__db.connection.commit() class UserDao: def __init__(self, db): self.__db = db def search_by_id(self, id):", "return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return list(map(create_game_with_tuple, games)) def translate_user(user_tuple): return User(user_tuple[0], user_tuple[1],", "cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return games def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID,", "GameDao: def __init__(self, db): self.__db = db def save_game(self, game): cursor = self.__db.connection.cursor()", "values (%s, %s, %s)\" class GameDao: def __init__(self, db): self.__db = db def", "SET name=%s, category=%s, console=%s where id = %s\" SQL_SEARCH_GAMES = \"SELECT id, name,", "= cursor.fetchone() user = translate_user(data) if data else None return user def translate_games(games):", "cursor = self.__db.connection.cursor() if game.id: cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id) ) else:", "games def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone() return", "else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id = cursor.lastrowid self.__db.connection.commit() return game def list_game(self):", "game def list_game(self): cursor = 
self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return games def", "= \"SELECT id, name, password from user where id = %s\" SQL_UPDATE_GAME =", "cursor.fetchone() user = translate_user(data) if data else None return user def translate_games(games): def", "password from user where id = %s\" SQL_UPDATE_GAME = \"UPDATE game SET name=%s,", "\"UPDATE game SET name=%s, category=%s, console=%s where id = %s\" SQL_SEARCH_GAMES = \"SELECT", "SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id =", "class GameDao: def __init__(self, db): self.__db = db def save_game(self, game): cursor =", "= \"INSERT into game (name, category, console) values (%s, %s, %s)\" class GameDao:", "= cursor.lastrowid self.__db.connection.commit() return game def list_game(self): cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games =", "id, name, category, console from game\" SQL_CREATE_GAME = \"INSERT into game (name, category,", "if data else None return user def translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2],", "(id,)) game_tuple = cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME,", "\"delete from game where id = %s\" SQL_GAME_BY_ID = \"SELECT id, name, category,", "id = %s\" SQL_UPDATE_GAME = \"UPDATE game SET name=%s, category=%s, console=%s where id", "= %s\" SQL_UPDATE_GAME = \"UPDATE game SET name=%s, category=%s, console=%s where id =", "from game where id = %s\" SQL_USER_BY_ID = \"SELECT id, name, password from", "UserDao: def __init__(self, db): self.__db = db def search_by_id(self, id): cursor = self.__db.connection.cursor()", "(%s, %s, %s)\" class GameDao: def __init__(self, db): self.__db = db def save_game(self,", "def 
__init__(self, db): self.__db = db def save_game(self, game): cursor = self.__db.connection.cursor() if", "id, name, category, console from game where id = %s\" SQL_USER_BY_ID = \"SELECT", "id = %s\" SQL_GAME_BY_ID = \"SELECT id, name, category, console from game where", "return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao:", "where id = %s\" SQL_SEARCH_GAMES = \"SELECT id, name, category, console from game\"", "cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id = cursor.lastrowid self.__db.connection.commit() return game def list_game(self): cursor", "game (name, category, console) values (%s, %s, %s)\" class GameDao: def __init__(self, db):", "SQL_UPDATE_GAME = \"UPDATE game SET name=%s, category=%s, console=%s where id = %s\" SQL_SEARCH_GAMES", "console from game\" SQL_CREATE_GAME = \"INSERT into game (name, category, console) values (%s,", "game.id: cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console))", "Game, User SQL_DELETE_GAME = \"delete from game where id = %s\" SQL_GAME_BY_ID =", "SQL_GAME_BY_ID = \"SELECT id, name, category, console from game where id = %s\"", "category, console) values (%s, %s, %s)\" class GameDao: def __init__(self, db): self.__db =", "else None return user def translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0])", "create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return list(map(create_game_with_tuple, games)) def translate_user(user_tuple): return User(user_tuple[0],", "translate_games(cursor.fetchall()) return games def search_by_id(self, id): cursor = self.__db.connection.cursor() 
cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple =", "import Game, User SQL_DELETE_GAME = \"delete from game where id = %s\" SQL_GAME_BY_ID", "SQL_DELETE_GAME = \"delete from game where id = %s\" SQL_GAME_BY_ID = \"SELECT id,", "(id,)) data = cursor.fetchone() user = translate_user(data) if data else None return user", "data = cursor.fetchone() user = translate_user(data) if data else None return user def", "%s\" SQL_SEARCH_GAMES = \"SELECT id, name, category, console from game\" SQL_CREATE_GAME = \"INSERT", "self.__db.connection.commit() class UserDao: def __init__(self, db): self.__db = db def search_by_id(self, id): cursor", "__init__(self, db): self.__db = db def save_game(self, game): cursor = self.__db.connection.cursor() if game.id:", "Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return list(map(create_game_with_tuple, games)) def translate_user(user_tuple): return User(user_tuple[0], user_tuple[1], user_tuple[2])", "delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao: def __init__(self, db): self.__db = db", "def list_game(self): cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return games def search_by_id(self,", "user = translate_user(data) if data else None return user def translate_games(games): def create_game_with_tuple(game_tuple):", "self.__db.connection.cursor() if game.id: cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name,", "game.id = cursor.lastrowid self.__db.connection.commit() return game def list_game(self): cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games", "= \"UPDATE game SET name=%s, category=%s, console=%s where id = %s\" SQL_SEARCH_GAMES =", "self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao: def 
__init__(self, db): self.__db = db def search_by_id(self,", "%s\" SQL_UPDATE_GAME = \"UPDATE game SET name=%s, category=%s, console=%s where id = %s\"", "category, console from game\" SQL_CREATE_GAME = \"INSERT into game (name, category, console) values", "where id = %s\" SQL_UPDATE_GAME = \"UPDATE game SET name=%s, category=%s, console=%s where", "if game.id: cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category,", "from user where id = %s\" SQL_UPDATE_GAME = \"UPDATE game SET name=%s, category=%s,", "return game def list_game(self): cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return games", "def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao: def __init__(self, db): self.__db =", "db): self.__db = db def save_game(self, game): cursor = self.__db.connection.cursor() if game.id: cursor.execute(", "__init__(self, db): self.__db = db def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,))", "(name, category, console) values (%s, %s, %s)\" class GameDao: def __init__(self, db): self.__db", "id = %s\" SQL_SEARCH_GAMES = \"SELECT id, name, category, console from game\" SQL_CREATE_GAME", "= %s\" SQL_SEARCH_GAMES = \"SELECT id, name, category, console from game\" SQL_CREATE_GAME =", "data else None return user def translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3],", "None return user def translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return", "cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3], 
id=game_tuple[0])", "game\" SQL_CREATE_GAME = \"INSERT into game (name, category, console) values (%s, %s, %s)\"", "%s, %s)\" class GameDao: def __init__(self, db): self.__db = db def save_game(self, game):", "game_tuple = cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,))", "cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone() user = translate_user(data) if data", "= %s\" SQL_GAME_BY_ID = \"SELECT id, name, category, console from game where id", "list_game(self): cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return games def search_by_id(self, id):", "cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone() user = translate_user(data) if data else None return", "%s\" SQL_USER_BY_ID = \"SELECT id, name, password from user where id = %s\"", "user where id = %s\" SQL_UPDATE_GAME = \"UPDATE game SET name=%s, category=%s, console=%s", "db def save_game(self, game): cursor = self.__db.connection.cursor() if game.id: cursor.execute( SQL_UPDATE_GAME, (game.name, game.category,", "game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id = cursor.lastrowid self.__db.connection.commit() return game", "game.category, game.console)) game.id = cursor.lastrowid self.__db.connection.commit() return game def list_game(self): cursor = self.__db.connection.cursor()", "Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao: def", "cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id", "= self.__db.connection.cursor() if game.id: 
cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME,", "name=%s, category=%s, console=%s where id = %s\" SQL_SEARCH_GAMES = \"SELECT id, name, category,", "= translate_user(data) if data else None return user def translate_games(games): def create_game_with_tuple(game_tuple): return", "console) values (%s, %s, %s)\" class GameDao: def __init__(self, db): self.__db = db", "= \"SELECT id, name, category, console from game where id = %s\" SQL_USER_BY_ID", "\"SELECT id, name, category, console from game where id = %s\" SQL_USER_BY_ID =", "self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return games def search_by_id(self, id): cursor = self.__db.connection.cursor()", "return games def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone()", "category, console from game where id = %s\" SQL_USER_BY_ID = \"SELECT id, name,", "where id = %s\" SQL_GAME_BY_ID = \"SELECT id, name, category, console from game", "<filename>alura-python/gamelib/dao.py from models import Game, User SQL_DELETE_GAME = \"delete from game where id", "game.category, game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id = cursor.lastrowid self.__db.connection.commit()", "game.console)) game.id = cursor.lastrowid self.__db.connection.commit() return game def list_game(self): cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES)", "from game\" SQL_CREATE_GAME = \"INSERT into game (name, category, console) values (%s, %s,", "name, password from user where id = %s\" SQL_UPDATE_GAME = \"UPDATE game SET", "User SQL_DELETE_GAME = \"delete from game where id = %s\" SQL_GAME_BY_ID = \"SELECT", "SQL_SEARCH_GAMES = \"SELECT id, name, category, console from game\" SQL_CREATE_GAME = \"INSERT into", "= \"delete from game where id = %s\" 
SQL_GAME_BY_ID = \"SELECT id, name,", "def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone() return Game(game_tuple[1],", "def __init__(self, db): self.__db = db def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID,", "= self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return games def search_by_id(self, id): cursor =", ") else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id = cursor.lastrowid self.__db.connection.commit() return game def", "\"SELECT id, name, password from user where id = %s\" SQL_UPDATE_GAME = \"UPDATE", "self.__db = db def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data =", "translate_user(data) if data else None return user def translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1],", "console=%s where id = %s\" SQL_SEARCH_GAMES = \"SELECT id, name, category, console from", "cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self, id):", "console from game where id = %s\" SQL_USER_BY_ID = \"SELECT id, name, password", "def translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return list(map(create_game_with_tuple, games)) def", "(game.name, game.category, game.console, game.id) ) else: cursor.execute(SQL_CREATE_GAME, (game.name, game.category, game.console)) game.id = cursor.lastrowid", "game where id = %s\" SQL_GAME_BY_ID = \"SELECT id, name, category, console from", "(game.name, game.category, game.console)) game.id = cursor.lastrowid self.__db.connection.commit() return game def list_game(self): cursor =", "= %s\" SQL_USER_BY_ID = \"SELECT id, 
name, password from user where id =", "category=%s, console=%s where id = %s\" SQL_SEARCH_GAMES = \"SELECT id, name, category, console", "= self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def", "name, category, console from game\" SQL_CREATE_GAME = \"INSERT into game (name, category, console)", "game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao: def __init__(self,", "id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao: def __init__(self, db): self.__db = db def", "cursor.lastrowid self.__db.connection.commit() return game def list_game(self): cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall())", "id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao: def __init__(self, db): self.__db", "user def translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return list(map(create_game_with_tuple, games))", "game_tuple[3], id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class UserDao: def __init__(self, db):", "\"SELECT id, name, category, console from game\" SQL_CREATE_GAME = \"INSERT into game (name,", "cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit() class", "%s)\" class GameDao: def __init__(self, db): self.__db = db def save_game(self, game): cursor", "save_game(self, 
game): cursor = self.__db.connection.cursor() if game.id: cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id)", "translate_games(games): def create_game_with_tuple(game_tuple): return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) return list(map(create_game_with_tuple, games)) def translate_user(user_tuple):", "%s\" SQL_GAME_BY_ID = \"SELECT id, name, category, console from game where id =", "= \"SELECT id, name, category, console from game\" SQL_CREATE_GAME = \"INSERT into game", "self.__db.connection.commit() return game def list_game(self): cursor = self.__db.connection.cursor() cursor.execute(SQL_SEARCH_GAMES) games = translate_games(cursor.fetchall()) return", "game where id = %s\" SQL_USER_BY_ID = \"SELECT id, name, password from user", "self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone() user = translate_user(data) if data else None", "from game where id = %s\" SQL_GAME_BY_ID = \"SELECT id, name, category, console", "class UserDao: def __init__(self, db): self.__db = db def search_by_id(self, id): cursor =", "where id = %s\" SQL_USER_BY_ID = \"SELECT id, name, password from user where", "= cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3], id=game_tuple[0]) def delete_game(self, id): self.__db.connection.cursor().execute(SQL_DELETE_GAME, (id,)) self.__db.connection.commit()", "game): cursor = self.__db.connection.cursor() if game.id: cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console, game.id) )", "\"INSERT into game (name, category, console) values (%s, %s, %s)\" class GameDao: def", "from models import Game, User SQL_DELETE_GAME = \"delete from game where id =", "id): cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,)) game_tuple = cursor.fetchone() return Game(game_tuple[1], game_tuple[2], game_tuple[3],", "def save_game(self, game): cursor = self.__db.connection.cursor() if game.id: 
cursor.execute( SQL_UPDATE_GAME, (game.name, game.category, game.console,", "def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone() user =", "SQL_USER_BY_ID = \"SELECT id, name, password from user where id = %s\" SQL_UPDATE_GAME", "games = translate_games(cursor.fetchall()) return games def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_GAME_BY_ID, (id,))", "self.__db = db def save_game(self, game): cursor = self.__db.connection.cursor() if game.id: cursor.execute( SQL_UPDATE_GAME,", "SQL_CREATE_GAME = \"INSERT into game (name, category, console) values (%s, %s, %s)\" class", "into game (name, category, console) values (%s, %s, %s)\" class GameDao: def __init__(self,", "db def search_by_id(self, id): cursor = self.__db.connection.cursor() cursor.execute(SQL_USER_BY_ID, (id,)) data = cursor.fetchone() user" ]
[ "'result': result, } return payload, offset def write_attribute_value(data: bytes, offset: int = 0):", "return payload, offset def send_user_read_response(data: bytes, offset: int = 0): FORMAT = '<HH'", "} return payload, offset def read_attribute_type(data: bytes, offset: int = 0): FORMAT =", "= data[offset:offset + n] offset += n if len(value) < n: raise error", "payload = { 'result': result, 'sent_len': sent_len, } return payload, offset def read_attribute_type(data:", "offset def set_capabilities(data: bytes, offset: int = 0): FORMAT = '<H' result, =", "data[offset:offset + n] offset += n if len(_type) < n: raise error payload", "+= calcsize(FORMAT) payload = { 'result': result, } return payload, offset def set_capabilities(data:", "def read_attribute_type(data: bytes, offset: int = 0): FORMAT = '<HB' result, n =", "n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) _type = data[offset:offset + n]", "unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) _type = data[offset:offset + n] offset +=", "data, offset=offset) offset += calcsize(FORMAT) payload = { 'result': result, } return payload,", "<reponame>GetAmbush/python-bgapi from struct import (unpack_from, calcsize, error) def find_attribute(data: bytes, offset: int =", "payload = { 'result': result, 'value': value, } return payload, offset def send_characteristic_notification(data:", "n] offset += n if len(_type) < n: raise error payload = {", "raise error payload = { 'result': result, 'type': _type, } return payload, offset", "FORMAT = '<HB' result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) _type", "send_user_write_response(data: bytes, offset: int = 0): FORMAT = '<H' result, = unpack_from(FORMAT, data,", "= { 'result': result, 'value': value, } return payload, offset def send_characteristic_notification(data: bytes,", "if len(value) < n: raise error payload = { 'result': result, 'value': value,", "result, sent_len = unpack_from(FORMAT, 
data, offset=offset) offset += calcsize(FORMAT) payload = { 'result':", "{ 'result': result, 'value': value, } return payload, offset def send_characteristic_notification(data: bytes, offset:", "error payload = { 'result': result, 'value': value, } return payload, offset def", "payload = { 'result': result, 'sent_len': sent_len, } return payload, offset def send_user_read_response(data:", "} return payload, offset def send_characteristic_notification(data: bytes, offset: int = 0): FORMAT =", "payload, offset def send_characteristic_notification(data: bytes, offset: int = 0): FORMAT = '<HH' result,", "= 0): FORMAT = '<HH' result, sent_len = unpack_from(FORMAT, data, offset=offset) offset +=", "offset=offset) offset += calcsize(FORMAT) value = data[offset:offset + n] offset += n if", "n: raise error payload = { 'result': result, 'value': value, } return payload,", "result, 'type': _type, } return payload, offset def read_attribute_value(data: bytes, offset: int =", "def send_user_write_response(data: bytes, offset: int = 0): FORMAT = '<H' result, = unpack_from(FORMAT,", "write_attribute_value(data: bytes, offset: int = 0): FORMAT = '<H' result, = unpack_from(FORMAT, data,", "offset def send_characteristic_notification(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len", "def send_characteristic_notification(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len =", "{ 'result': result, 'type': _type, } return payload, offset def read_attribute_value(data: bytes, offset:", "result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) value = data[offset:offset +", "return payload, offset def send_characteristic_notification(data: bytes, offset: int = 0): FORMAT = '<HH'", "(unpack_from, calcsize, error) def find_attribute(data: bytes, offset: int = 0): FORMAT = '<HH'", "result, 'sent_len': sent_len, } return payload, offset def read_attribute_type(data: bytes, offset: int =", "_type, } return payload, offset def 
read_attribute_value(data: bytes, offset: int = 0): FORMAT", "calcsize(FORMAT) payload = { 'result': result, } return payload, offset def set_capabilities(data: bytes,", "0): FORMAT = '<HH' result, sent_len = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT)", "} return payload, offset def read_attribute_value(data: bytes, offset: int = 0): FORMAT =", "payload = { 'result': result, } return payload, offset def set_capabilities(data: bytes, offset:", "= { 'result': result, } return payload, offset def write_attribute_value(data: bytes, offset: int", "bytes, offset: int = 0): FORMAT = '<HH' result, sent_len = unpack_from(FORMAT, data,", "< n: raise error payload = { 'result': result, 'value': value, } return", "offset: int = 0): FORMAT = '<H' result, = unpack_from(FORMAT, data, offset=offset) offset", "= unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) value = data[offset:offset + n] offset", "= { 'result': result, 'sent_len': sent_len, } return payload, offset def send_user_read_response(data: bytes,", "= { 'result': result, 'sent_len': sent_len, } return payload, offset def read_attribute_type(data: bytes,", "offset += n if len(value) < n: raise error payload = { 'result':", "'<HB' result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) value = data[offset:offset", "int = 0): FORMAT = '<HH' result, sent_len = unpack_from(FORMAT, data, offset=offset) offset", "offset def read_attribute_value(data: bytes, offset: int = 0): FORMAT = '<HB' result, n", "payload, offset def send_user_write_response(data: bytes, offset: int = 0): FORMAT = '<H' result,", "'result': result, 'sent_len': sent_len, } return payload, offset def send_user_write_response(data: bytes, offset: int", "offset def write_attribute_value(data: bytes, offset: int = 0): FORMAT = '<H' result, =", "unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = { 'result': result, } return", "bytes, offset: int = 0): FORMAT = '<HB' 
result, n = unpack_from(FORMAT, data,", "payload = { 'result': result, 'type': _type, } return payload, offset def read_attribute_value(data:", "result, } return payload, offset def set_capabilities(data: bytes, offset: int = 0): FORMAT", "= { 'result': result, } return payload, offset def set_capabilities(data: bytes, offset: int", "error payload = { 'result': result, 'type': _type, } return payload, offset def", "= '<HB' result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) _type =", "unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = { 'result': result, 'sent_len': sent_len,", "'<HB' result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) _type = data[offset:offset", "from struct import (unpack_from, calcsize, error) def find_attribute(data: bytes, offset: int = 0):", "n] offset += n if len(value) < n: raise error payload = {", "result, 'value': value, } return payload, offset def send_characteristic_notification(data: bytes, offset: int =", "payload = { 'result': result, 'sent_len': sent_len, } return payload, offset def send_user_write_response(data:", "struct import (unpack_from, calcsize, error) def find_attribute(data: bytes, offset: int = 0): FORMAT", "value, } return payload, offset def send_characteristic_notification(data: bytes, offset: int = 0): FORMAT", "payload, offset def send_user_read_response(data: bytes, offset: int = 0): FORMAT = '<HH' result,", "payload, offset def read_attribute_value(data: bytes, offset: int = 0): FORMAT = '<HB' result,", "'sent_len': sent_len, } return payload, offset def send_user_write_response(data: bytes, offset: int = 0):", "sent_len, } return payload, offset def read_attribute_type(data: bytes, offset: int = 0): FORMAT", "sent_len, } return payload, offset def send_user_read_response(data: bytes, offset: int = 0): FORMAT", "n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) value = data[offset:offset + n]", "'<HH' 
result, sent_len = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = {", "send_characteristic_notification(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len = unpack_from(FORMAT,", "FORMAT = '<HH' result, sent_len = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload", "find_attribute(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len = unpack_from(FORMAT,", "return payload, offset def read_attribute_value(data: bytes, offset: int = 0): FORMAT = '<HB'", "len(_type) < n: raise error payload = { 'result': result, 'type': _type, }", "calcsize(FORMAT) payload = { 'result': result, } return payload, offset def write_attribute_value(data: bytes,", "set_capabilities(data: bytes, offset: int = 0): FORMAT = '<H' result, = unpack_from(FORMAT, data,", "result, } return payload, offset def write_attribute_value(data: bytes, offset: int = 0): FORMAT", "def set_capabilities(data: bytes, offset: int = 0): FORMAT = '<H' result, = unpack_from(FORMAT,", "read_attribute_type(data: bytes, offset: int = 0): FORMAT = '<HB' result, n = unpack_from(FORMAT,", "import (unpack_from, calcsize, error) def find_attribute(data: bytes, offset: int = 0): FORMAT =", "data, offset=offset) offset += calcsize(FORMAT) payload = { 'result': result, 'sent_len': sent_len, }", "calcsize(FORMAT) value = data[offset:offset + n] offset += n if len(value) < n:", "offset def send_user_write_response(data: bytes, offset: int = 0): FORMAT = '<H' result, =", "+= calcsize(FORMAT) payload = { 'result': result, 'sent_len': sent_len, } return payload, offset", "= unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = { 'result': result, }", "unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) value = data[offset:offset + n] offset +=", "def read_attribute_value(data: bytes, offset: int = 0): FORMAT = '<HB' result, n =", "int = 0): FORMAT = '<HB' result, n = unpack_from(FORMAT, data, offset=offset) 
offset", "def send_user_read_response(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len =", "data[offset:offset + n] offset += n if len(value) < n: raise error payload", "'<H' result, = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = { 'result':", "result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) _type = data[offset:offset +", "result, 'sent_len': sent_len, } return payload, offset def send_user_read_response(data: bytes, offset: int =", "= data[offset:offset + n] offset += n if len(_type) < n: raise error", "n if len(_type) < n: raise error payload = { 'result': result, 'type':", "{ 'result': result, } return payload, offset def set_capabilities(data: bytes, offset: int =", "read_attribute_value(data: bytes, offset: int = 0): FORMAT = '<HB' result, n = unpack_from(FORMAT,", "'value': value, } return payload, offset def send_characteristic_notification(data: bytes, offset: int = 0):", "offset=offset) offset += calcsize(FORMAT) payload = { 'result': result, } return payload, offset", "= 0): FORMAT = '<HB' result, n = unpack_from(FORMAT, data, offset=offset) offset +=", "} return payload, offset def send_user_write_response(data: bytes, offset: int = 0): FORMAT =", "+= n if len(_type) < n: raise error payload = { 'result': result,", "< n: raise error payload = { 'result': result, 'type': _type, } return", "return payload, offset def set_capabilities(data: bytes, offset: int = 0): FORMAT = '<H'", "} return payload, offset def send_user_read_response(data: bytes, offset: int = 0): FORMAT =", "'result': result, 'value': value, } return payload, offset def send_characteristic_notification(data: bytes, offset: int", "+= calcsize(FORMAT) value = data[offset:offset + n] offset += n if len(value) <", "0): FORMAT = '<HB' result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT)", "'sent_len': sent_len, } return payload, offset def read_attribute_type(data: bytes, offset: int = 
0):", "'result': result, } return payload, offset def set_capabilities(data: bytes, offset: int = 0):", "= unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) _type = data[offset:offset + n] offset", "} return payload, offset def write_attribute_value(data: bytes, offset: int = 0): FORMAT =", "FORMAT = '<HB' result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) value", "= { 'result': result, 'sent_len': sent_len, } return payload, offset def send_user_write_response(data: bytes,", "{ 'result': result, } return payload, offset def write_attribute_value(data: bytes, offset: int =", "offset += n if len(_type) < n: raise error payload = { 'result':", "'result': result, 'sent_len': sent_len, } return payload, offset def read_attribute_type(data: bytes, offset: int", "+ n] offset += n if len(_type) < n: raise error payload =", "result, = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = { 'result': result,", "payload, offset def set_capabilities(data: bytes, offset: int = 0): FORMAT = '<H' result,", "+= calcsize(FORMAT) _type = data[offset:offset + n] offset += n if len(_type) <", "calcsize, error) def find_attribute(data: bytes, offset: int = 0): FORMAT = '<HH' result,", "offset=offset) offset += calcsize(FORMAT) payload = { 'result': result, 'sent_len': sent_len, } return", "return payload, offset def write_attribute_value(data: bytes, offset: int = 0): FORMAT = '<H'", "error) def find_attribute(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len", "_type = data[offset:offset + n] offset += n if len(_type) < n: raise", "= '<H' result, = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = {", "sent_len = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = { 'result': result,", "{ 'result': result, 'sent_len': sent_len, } return payload, offset def read_attribute_type(data: bytes, offset:", "n if len(value) < n: raise error payload = { 
'result': result, 'value':", "offset += calcsize(FORMAT) value = data[offset:offset + n] offset += n if len(value)", "result, 'sent_len': sent_len, } return payload, offset def send_user_write_response(data: bytes, offset: int =", "= '<HH' result, sent_len = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload =", "offset def read_attribute_type(data: bytes, offset: int = 0): FORMAT = '<HB' result, n", "'result': result, 'sent_len': sent_len, } return payload, offset def send_user_read_response(data: bytes, offset: int", "= 0): FORMAT = '<H' result, = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT)", "= unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload = { 'result': result, 'sent_len':", "offset: int = 0): FORMAT = '<HH' result, sent_len = unpack_from(FORMAT, data, offset=offset)", "{ 'result': result, 'sent_len': sent_len, } return payload, offset def send_user_read_response(data: bytes, offset:", "payload = { 'result': result, } return payload, offset def write_attribute_value(data: bytes, offset:", "= '<HB' result, n = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) value =", "bytes, offset: int = 0): FORMAT = '<H' result, = unpack_from(FORMAT, data, offset=offset)", "data, offset=offset) offset += calcsize(FORMAT) _type = data[offset:offset + n] offset += n", "+= calcsize(FORMAT) payload = { 'result': result, } return payload, offset def write_attribute_value(data:", "int = 0): FORMAT = '<H' result, = unpack_from(FORMAT, data, offset=offset) offset +=", "calcsize(FORMAT) _type = data[offset:offset + n] offset += n if len(_type) < n:", "def write_attribute_value(data: bytes, offset: int = 0): FORMAT = '<H' result, = unpack_from(FORMAT,", "FORMAT = '<H' result, = unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload =", "= { 'result': result, 'type': _type, } return payload, offset def read_attribute_value(data: bytes,", "raise error payload = { 
'result': result, 'value': value, } return payload, offset", "payload, offset def write_attribute_value(data: bytes, offset: int = 0): FORMAT = '<H' result,", "calcsize(FORMAT) payload = { 'result': result, 'sent_len': sent_len, } return payload, offset def", "offset: int = 0): FORMAT = '<HB' result, n = unpack_from(FORMAT, data, offset=offset)", "n: raise error payload = { 'result': result, 'type': _type, } return payload,", "value = data[offset:offset + n] offset += n if len(value) < n: raise", "if len(_type) < n: raise error payload = { 'result': result, 'type': _type,", "'result': result, 'type': _type, } return payload, offset def read_attribute_value(data: bytes, offset: int", "return payload, offset def send_user_write_response(data: bytes, offset: int = 0): FORMAT = '<H'", "'sent_len': sent_len, } return payload, offset def send_user_read_response(data: bytes, offset: int = 0):", "{ 'result': result, 'sent_len': sent_len, } return payload, offset def send_user_write_response(data: bytes, offset:", "'type': _type, } return payload, offset def read_attribute_value(data: bytes, offset: int = 0):", "offset += calcsize(FORMAT) payload = { 'result': result, 'sent_len': sent_len, } return payload,", "offset=offset) offset += calcsize(FORMAT) _type = data[offset:offset + n] offset += n if", "offset += calcsize(FORMAT) _type = data[offset:offset + n] offset += n if len(_type)", "offset += calcsize(FORMAT) payload = { 'result': result, } return payload, offset def", "} return payload, offset def set_capabilities(data: bytes, offset: int = 0): FORMAT =", "len(value) < n: raise error payload = { 'result': result, 'value': value, }", "data, offset=offset) offset += calcsize(FORMAT) value = data[offset:offset + n] offset += n", "return payload, offset def read_attribute_type(data: bytes, offset: int = 0): FORMAT = '<HB'", "send_user_read_response(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len = unpack_from(FORMAT,", "0): FORMAT = '<H' result, = 
unpack_from(FORMAT, data, offset=offset) offset += calcsize(FORMAT) payload", "offset def send_user_read_response(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len", "sent_len, } return payload, offset def send_user_write_response(data: bytes, offset: int = 0): FORMAT", "def find_attribute(data: bytes, offset: int = 0): FORMAT = '<HH' result, sent_len =", "payload, offset def read_attribute_type(data: bytes, offset: int = 0): FORMAT = '<HB' result,", "+ n] offset += n if len(value) < n: raise error payload =", "+= n if len(value) < n: raise error payload = { 'result': result," ]
[ "('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants', name='id_course', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.Course'), ),", "('api', '0003_auto_20191113_1639'), ] operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,", "('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'), ('s', 'Senior'),", "name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'),", "null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'), ('s', 'Senior'), ('l', 'Lead')],", "dependencies = [ ('api', '0003_auto_20191113_1639'), ] operations = [ migrations.CreateModel( name='Course', fields=[ ('id',", "null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'), ('s',", "models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'), ('p', 'php'), ('py', 'python')], default='py', help_text='Направление", "fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j',", "[ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True,", "[ ('api', '0003_auto_20191113_1639'), ] operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True,", "= [ ('api', '0003_auto_20191113_1639'), ] operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True,", "'php'), ('py', 'python')], default='py', help_text='Направление курса', max_length=1)), ('date_start', models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True,", "('j', 'js'), ('p', 'php'), ('py', 'python')], default='py', help_text='Направление курса', max_length=1)), ('date_start', models.DateField(blank=True, null=True)),", "'Middle'), ('s', 'Senior'), ('l', 'Lead')], default='j', help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description',", "serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'), ('p', 'php'), ('py',", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0003_auto_20191113_1639'), ] operations", "related_name='course', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants', name='id_course', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.Course'), ), migrations.DeleteModel( name='Courses',", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0003_auto_20191113_1639'),", "'python')], default='py', help_text='Направление курса', max_length=1)), ('date_start', models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало 
регистрации')),", "models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'), ('s', 'Senior'), ('l', 'Lead')], default='j', help_text='Уровень курса', max_length=1)),", "('py', 'python')], default='py', help_text='Направление курса', max_length=1)), ('date_start', models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало", "= [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction',", "models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'),", "models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'), ('s', 'Senior'), ('l',", "import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies =", "max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса', max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)),", "2.2.1 on 2019-11-14 07:08 from django.conf import settings from django.db import migrations, models", "models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'), ('p', 'php'), ('py', 'python')], default='py', help_text='Направление курса', max_length=1)),", "'Junior'), ('m', 'Middle'), ('s', 'Senior'), ('l', 'Lead')], default='j', help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True,", "null=True)), ('description', models.TextField(help_text='Описание курса', 
max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField(", "курса', max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants', name='id_course', field=models.ForeignKey(null=True,", "('m', 'Middle'), ('s', 'Senior'), ('l', 'Lead')], default='j', help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)),", "models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса', max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ], ),", "max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants', name='id_course', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,", "class Migration(migrations.Migration): dependencies = [ ('api', '0003_auto_20191113_1639'), ] operations = [ migrations.CreateModel( name='Course',", "07:08 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class", "Migration(migrations.Migration): dependencies = [ ('api', '0003_auto_20191113_1639'), ] operations = [ migrations.CreateModel( name='Course', fields=[", "choices=[('h', 'html'), ('j', 'js'), ('p', 'php'), ('py', 'python')], default='py', help_text='Направление курса', max_length=1)), ('date_start',", "регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'), ('s', 'Senior'), ('l', 'Lead')], default='j', help_text='Уровень", "курса', 
max_length=1)), ('date_start', models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j',", "Generated by Django 2.2.1 on 2019-11-14 07:08 from django.conf import settings from django.db", "django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies", "help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса', max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0003_auto_20191113_1639'), ]", "settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [", "Django 2.2.1 on 2019-11-14 07:08 from django.conf import settings from django.db import migrations,", "], ), migrations.AlterField( model_name='courseparticipants', name='id_course', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.Course'), ), migrations.DeleteModel( name='Courses', ), ]", "<filename>backend/api/migrations/0004_auto_20191114_1008.py # Generated by Django 2.2.1 on 2019-11-14 07:08 from django.conf import settings", "default='py', help_text='Направление курса', max_length=1)), ('date_start', models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level',", "'html'), ('j', 'js'), ('p', 'php'), ('py', 'python')], default='py', help_text='Направление курса', max_length=1)), ('date_start', models.DateField(blank=True,", "from django.db import migrations, models import 
django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api',", "('p', 'php'), ('py', 'python')], default='py', help_text='Направление курса', max_length=1)), ('date_start', models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True,", "null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants', name='id_course', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.Course'),", "('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'),", "('date_start', models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m',", "verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'), ('p', 'php'), ('py', 'python')],", "max_length=1)), ('date_start', models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'),", "'js'), ('p', 'php'), ('py', 'python')], default='py', help_text='Направление курса', max_length=1)), ('date_start', models.DateField(blank=True, null=True)), ('date_start_registration',", "by Django 2.2.1 on 2019-11-14 07:08 from django.conf import settings from django.db import", "('description', models.TextField(help_text='Описание курса', max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', 
to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants',", "models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0003_auto_20191113_1639'), ] operations =", "('l', 'Lead')], default='j', help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса', max_length=2000,", "('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса', max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ],", "models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants', name='id_course', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='api.Course'), ), migrations.DeleteModel(", "migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h',", "'0003_auto_20191113_1639'), ] operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "2019-11-14 07:08 from django.conf import settings from django.db import migrations, models import django.db.models.deletion", "operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)),", "to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants', name='id_course', field=models.ForeignKey(null=True, 
on_delete=django.db.models.deletion.CASCADE, to='api.Course'), ), migrations.DeleteModel( name='Courses', ),", "on 2019-11-14 07:08 from django.conf import settings from django.db import migrations, models import", "('s', 'Senior'), ('l', 'Lead')], default='j', help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание", "] operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name',", "models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'), ('p',", "from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "default='j', help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса', max_length=2000, null=True)), ('members',", "models.TextField(help_text='Описание курса', max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course', to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='courseparticipants', name='id_course',", "primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'), ('p', 'php'),", "# Generated by Django 2.2.1 on 2019-11-14 07:08 from django.conf import settings from", "('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'), ('s', 'Senior'), ('l', 'Lead')], default='j', help_text='Уровень курса',", "help_text='Направление курса', max_length=1)), ('date_start', 
models.DateField(blank=True, null=True)), ('date_start_registration', models.DateField(blank=True, null=True, verbose_name='Начало регистрации')), ('level', models.CharField(blank=True,", "import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0003_auto_20191113_1639'), ] operations = [", "'Senior'), ('l', 'Lead')], default='j', help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса',", "('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'), ('p', 'php'), ('py', 'python')], default='py', help_text='Направление курса',", "choices=[('j', 'Junior'), ('m', 'Middle'), ('s', 'Senior'), ('l', 'Lead')], default='j', help_text='Уровень курса', max_length=1)), ('duration',", "курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса', max_length=2000, null=True)), ('members', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='course',", "verbose_name='Начало регистрации')), ('level', models.CharField(blank=True, choices=[('j', 'Junior'), ('m', 'Middle'), ('s', 'Senior'), ('l', 'Lead')], default='j',", "('name', models.CharField(max_length=200)), ('direction', models.CharField(blank=True, choices=[('h', 'html'), ('j', 'js'), ('p', 'php'), ('py', 'python')], default='py',", "django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0003_auto_20191113_1639'), ] operations = [ migrations.CreateModel(", "'Lead')], default='j', help_text='Уровень курса', max_length=1)), ('duration', models.IntegerField(blank=True, null=True)), ('description', models.TextField(help_text='Описание курса', max_length=2000, null=True))," ]
[ "4)), \"20\": ((1, 8), (1, 8)) } sensor_shape = { \"1140\": (38, 30),", "(1, 8)) } sensor_shape = { \"1140\": (38, 30), \"285\": (19, 15), \"80\":", "4), (1, 4)), \"20\": ((1, 8), (1, 8)) } sensor_shape = { \"1140\":", "8), (1, 8)) } sensor_shape = { \"1140\": (38, 30), \"285\": (19, 15),", "\"20\": ((1, 8), (1, 8)) } sensor_shape = { \"1140\": (38, 30), \"285\":", "\"1140\": (38, 30), \"285\": (19, 15), \"80\": (10, 8), \"20\": (5, 4) }", "8)) } sensor_shape = { \"1140\": (38, 30), \"285\": (19, 15), \"80\": (10,", "{ \"1140\": (38, 30), \"285\": (19, 15), \"80\": (10, 8), \"20\": (5, 4)", "\"80\": ((1, 4), (1, 4)), \"20\": ((1, 8), (1, 8)) } sensor_shape =", "((1, 4), (1, 4)), \"20\": ((1, 8), (1, 8)) } sensor_shape = {", "\"285\": ((1, 2), (1, 2)), \"80\": ((1, 4), (1, 4)), \"20\": ((1, 8),", "sensor_shape = { \"1140\": (38, 30), \"285\": (19, 15), \"80\": (10, 8), \"20\":", "(1, 4)), \"20\": ((1, 8), (1, 8)) } sensor_shape = { \"1140\": (38,", "sensor_indices = { \"1140\": ((0, 1), (0, 1)), \"285\": ((1, 2), (1, 2)),", "2)), \"80\": ((1, 4), (1, 4)), \"20\": ((1, 8), (1, 8)) } sensor_shape", "= { \"1140\": (38, 30), \"285\": (19, 15), \"80\": (10, 8), \"20\": (5,", "(0, 1)), \"285\": ((1, 2), (1, 2)), \"80\": ((1, 4), (1, 4)), \"20\":", "2), (1, 2)), \"80\": ((1, 4), (1, 4)), \"20\": ((1, 8), (1, 8))", "1), (0, 1)), \"285\": ((1, 2), (1, 2)), \"80\": ((1, 4), (1, 4)),", "((0, 1), (0, 1)), \"285\": ((1, 2), (1, 2)), \"80\": ((1, 4), (1,", "= { \"1140\": ((0, 1), (0, 1)), \"285\": ((1, 2), (1, 2)), \"80\":", "1)), \"285\": ((1, 2), (1, 2)), \"80\": ((1, 4), (1, 4)), \"20\": ((1,", "\"1140\": ((0, 1), (0, 1)), \"285\": ((1, 2), (1, 2)), \"80\": ((1, 4),", "(1, 2)), \"80\": ((1, 4), (1, 4)), \"20\": ((1, 8), (1, 8)) }", "} sensor_shape = { \"1140\": (38, 30), \"285\": (19, 15), \"80\": (10, 8),", "{ \"1140\": ((0, 1), (0, 1)), \"285\": ((1, 2), (1, 2)), \"80\": ((1,", "((1, 2), (1, 2)), \"80\": ((1, 4), (1, 4)), \"20\": ((1, 8), (1,", "((1, 8), (1, 8)) } 
sensor_shape = { \"1140\": (38, 30), \"285\": (19," ]
[ "at the start of each CSV file into a new file \"\"\" import", "= orig.readlines() for line in lines: if line.startswith('Bank'): new_file.close() idx = idx +", "tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through and separate the new files orig_files =", "1 new_file_name = orig_file.parent.joinpath( orig_file.stem + '-' + str(idx) + '.txt') new_file =", "pathlib import Path def main(root: str): root_path = Path(root) # Stash old files", "line in lines: if line.startswith('Bank'): new_file.close() idx = idx + 1 new_file_name =", "the new files orig_files = [f for f in root_path.glob('*.tmp')] for orig_file in", "files to *.tmp files 2. Go through and break up at the start", "Path(root) # Stash old files tmp_files = [f for f in root_path.glob('*.txt')] for", "with open(orig_file, 'r', encoding='cp1252') as orig: lines = orig.readlines() for line in lines:", "Path def main(root: str): root_path = Path(root) # Stash old files tmp_files =", "0 new_file_name = orig_file.parent.joinpath(orig_file.stem + '-' + str(idx) + '.txt') new_file = open(new_file_name,", "files We do this in two steps: 1. Move all existing *.txt files", "orig_files: idx = 0 new_file_name = orig_file.parent.joinpath(orig_file.stem + '-' + str(idx) + '.txt')", "def main(root: str): root_path = Path(root) # Stash old files tmp_files = [f", "root_path.glob('*.txt')] for tmp_file in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through and separate the", "root_path.glob('*.tmp')] for orig_file in orig_files: idx = 0 new_file_name = orig_file.parent.joinpath(orig_file.stem + '-'", "new_file_name = orig_file.parent.joinpath(orig_file.stem + '-' + str(idx) + '.txt') new_file = open(new_file_name, 'w')", "for f in root_path.glob('*.tmp')] for orig_file in orig_files: idx = 0 new_file_name =", "1. Move all existing *.txt files to *.tmp files 2. 
Go through and", "tmp_files = [f for f in root_path.glob('*.txt')] for tmp_file in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp'))", "+ 1 new_file_name = orig_file.parent.joinpath( orig_file.stem + '-' + str(idx) + '.txt') new_file", "new_file_name = orig_file.parent.joinpath( orig_file.stem + '-' + str(idx) + '.txt') new_file = open(new_file_name,", "'.txt') new_file = open(new_file_name, 'w') new_file.write(line) else: new_file.write(line) new_file.close() if __name__ == '__main__':", "'r', encoding='cp1252') as orig: lines = orig.readlines() for line in lines: if line.startswith('Bank'):", "We do this in two steps: 1. Move all existing *.txt files to", "orig_file in orig_files: idx = 0 new_file_name = orig_file.parent.joinpath(orig_file.stem + '-' + str(idx)", "'.txt') new_file = open(new_file_name, 'w') with open(orig_file, 'r', encoding='cp1252') as orig: lines =", "each CSV file into a new file \"\"\" import os import sys from", "Move all existing *.txt files to *.tmp files 2. Go through and break", "new_file = open(new_file_name, 'w') new_file.write(line) else: new_file.write(line) new_file.close() if __name__ == '__main__': if", "file \"\"\" import os import sys from pathlib import Path def main(root: str):", "+ str(idx) + '.txt') new_file = open(new_file_name, 'w') new_file.write(line) else: new_file.write(line) new_file.close() if", "unique files We do this in two steps: 1. Move all existing *.txt", "and break up at the start of each CSV file into a new", "Loop through and separate the new files orig_files = [f for f in", "orig_files = [f for f in root_path.glob('*.tmp')] for orig_file in orig_files: idx =", "of each CSV file into a new file \"\"\" import os import sys", "== 1: main(os.getcwd()) elif len(sys.argv) == 2: main(sys.argv[1]) else: print(\"Wrong number of args.\")", "in lines: if line.startswith('Bank'): new_file.close() idx = idx + 1 new_file_name = orig_file.parent.joinpath(", "this in two steps: 1. 
Move all existing *.txt files to *.tmp files", "new_file.close() if __name__ == '__main__': if len(sys.argv) == 1: main(os.getcwd()) elif len(sys.argv) ==", "= orig_file.parent.joinpath(orig_file.stem + '-' + str(idx) + '.txt') new_file = open(new_file_name, 'w') with", "open(new_file_name, 'w') with open(orig_file, 'r', encoding='cp1252') as orig: lines = orig.readlines() for line", "if line.startswith('Bank'): new_file.close() idx = idx + 1 new_file_name = orig_file.parent.joinpath( orig_file.stem +", "# Stash old files tmp_files = [f for f in root_path.glob('*.txt')] for tmp_file", "*.txt files to *.tmp files 2. Go through and break up at the", "do this in two steps: 1. Move all existing *.txt files to *.tmp", "tmp_file in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through and separate the new files", "#!/usr/bin/env python3 \"\"\"Separates Altera's junky concatenated CSV files into unique files We do", "new file \"\"\" import os import sys from pathlib import Path def main(root:", "= open(new_file_name, 'w') new_file.write(line) else: new_file.write(line) new_file.close() if __name__ == '__main__': if len(sys.argv)", "'-' + str(idx) + '.txt') new_file = open(new_file_name, 'w') new_file.write(line) else: new_file.write(line) new_file.close()", "open(new_file_name, 'w') new_file.write(line) else: new_file.write(line) new_file.close() if __name__ == '__main__': if len(sys.argv) ==", "= idx + 1 new_file_name = orig_file.parent.joinpath( orig_file.stem + '-' + str(idx) +", "python3 \"\"\"Separates Altera's junky concatenated CSV files into unique files We do this", "orig_file.stem + '-' + str(idx) + '.txt') new_file = open(new_file_name, 'w') new_file.write(line) else:", "files 2. 
Go through and break up at the start of each CSV", "a new file \"\"\" import os import sys from pathlib import Path def", "'-' + str(idx) + '.txt') new_file = open(new_file_name, 'w') with open(orig_file, 'r', encoding='cp1252')", "# Loop through and separate the new files orig_files = [f for f", "\"\"\" import os import sys from pathlib import Path def main(root: str): root_path", "concatenated CSV files into unique files We do this in two steps: 1.", "= Path(root) # Stash old files tmp_files = [f for f in root_path.glob('*.txt')]", "new files orig_files = [f for f in root_path.glob('*.tmp')] for orig_file in orig_files:", "str): root_path = Path(root) # Stash old files tmp_files = [f for f", "'__main__': if len(sys.argv) == 1: main(os.getcwd()) elif len(sys.argv) == 2: main(sys.argv[1]) else: print(\"Wrong", "in root_path.glob('*.tmp')] for orig_file in orig_files: idx = 0 new_file_name = orig_file.parent.joinpath(orig_file.stem +", "+ '.txt') new_file = open(new_file_name, 'w') new_file.write(line) else: new_file.write(line) new_file.close() if __name__ ==", "str(idx) + '.txt') new_file = open(new_file_name, 'w') with open(orig_file, 'r', encoding='cp1252') as orig:", "os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through and separate the new files orig_files = [f", "start of each CSV file into a new file \"\"\" import os import", "orig_file.parent.joinpath(orig_file.stem + '-' + str(idx) + '.txt') new_file = open(new_file_name, 'w') with open(orig_file,", "'w') new_file.write(line) else: new_file.write(line) new_file.close() if __name__ == '__main__': if len(sys.argv) == 1:", "through and break up at the start of each CSV file into a", "Altera's junky concatenated CSV files into unique files We do this in two", "import Path def main(root: str): root_path = Path(root) # Stash old files tmp_files", "'w') with open(orig_file, 'r', encoding='cp1252') as orig: lines = orig.readlines() for line in", "= [f for f in root_path.glob('*.txt')] for tmp_file in 
tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) #", "for line in lines: if line.startswith('Bank'): new_file.close() idx = idx + 1 new_file_name", "files tmp_files = [f for f in root_path.glob('*.txt')] for tmp_file in tmp_files: os.rename(tmp_file,", "as orig: lines = orig.readlines() for line in lines: if line.startswith('Bank'): new_file.close() idx", "lines = orig.readlines() for line in lines: if line.startswith('Bank'): new_file.close() idx = idx", "orig.readlines() for line in lines: if line.startswith('Bank'): new_file.close() idx = idx + 1", "up at the start of each CSV file into a new file \"\"\"", "CSV files into unique files We do this in two steps: 1. Move", "open(orig_file, 'r', encoding='cp1252') as orig: lines = orig.readlines() for line in lines: if", "in root_path.glob('*.txt')] for tmp_file in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through and separate", "2. Go through and break up at the start of each CSV file", "+ '.txt') new_file = open(new_file_name, 'w') with open(orig_file, 'r', encoding='cp1252') as orig: lines", "+ str(idx) + '.txt') new_file = open(new_file_name, 'w') with open(orig_file, 'r', encoding='cp1252') as", "new_file = open(new_file_name, 'w') with open(orig_file, 'r', encoding='cp1252') as orig: lines = orig.readlines()", "import os import sys from pathlib import Path def main(root: str): root_path =", "orig: lines = orig.readlines() for line in lines: if line.startswith('Bank'): new_file.close() idx =", "for orig_file in orig_files: idx = 0 new_file_name = orig_file.parent.joinpath(orig_file.stem + '-' +", "= [f for f in root_path.glob('*.tmp')] for orig_file in orig_files: idx = 0", "in two steps: 1. Move all existing *.txt files to *.tmp files 2.", "= open(new_file_name, 'w') with open(orig_file, 'r', encoding='cp1252') as orig: lines = orig.readlines() for", "*.tmp files 2. 
Go through and break up at the start of each", "[f for f in root_path.glob('*.tmp')] for orig_file in orig_files: idx = 0 new_file_name", "and separate the new files orig_files = [f for f in root_path.glob('*.tmp')] for", "two steps: 1. Move all existing *.txt files to *.tmp files 2. Go", "new_file.close() idx = idx + 1 new_file_name = orig_file.parent.joinpath( orig_file.stem + '-' +", "tmp_file.with_suffix('.tmp')) # Loop through and separate the new files orig_files = [f for", "else: new_file.write(line) new_file.close() if __name__ == '__main__': if len(sys.argv) == 1: main(os.getcwd()) elif", "sys from pathlib import Path def main(root: str): root_path = Path(root) # Stash", "existing *.txt files to *.tmp files 2. Go through and break up at", "Stash old files tmp_files = [f for f in root_path.glob('*.txt')] for tmp_file in", "through and separate the new files orig_files = [f for f in root_path.glob('*.tmp')]", "str(idx) + '.txt') new_file = open(new_file_name, 'w') new_file.write(line) else: new_file.write(line) new_file.close() if __name__", "into a new file \"\"\" import os import sys from pathlib import Path", "root_path = Path(root) # Stash old files tmp_files = [f for f in", "files into unique files We do this in two steps: 1. Move all", "import sys from pathlib import Path def main(root: str): root_path = Path(root) #", "junky concatenated CSV files into unique files We do this in two steps:", "old files tmp_files = [f for f in root_path.glob('*.txt')] for tmp_file in tmp_files:", "to *.tmp files 2. 
Go through and break up at the start of", "idx = 0 new_file_name = orig_file.parent.joinpath(orig_file.stem + '-' + str(idx) + '.txt') new_file", "__name__ == '__main__': if len(sys.argv) == 1: main(os.getcwd()) elif len(sys.argv) == 2: main(sys.argv[1])", "if len(sys.argv) == 1: main(os.getcwd()) elif len(sys.argv) == 2: main(sys.argv[1]) else: print(\"Wrong number", "the start of each CSV file into a new file \"\"\" import os", "os import sys from pathlib import Path def main(root: str): root_path = Path(root)", "CSV file into a new file \"\"\" import os import sys from pathlib", "+ '-' + str(idx) + '.txt') new_file = open(new_file_name, 'w') with open(orig_file, 'r',", "lines: if line.startswith('Bank'): new_file.close() idx = idx + 1 new_file_name = orig_file.parent.joinpath( orig_file.stem", "f in root_path.glob('*.txt')] for tmp_file in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through and", "new_file.write(line) else: new_file.write(line) new_file.close() if __name__ == '__main__': if len(sys.argv) == 1: main(os.getcwd())", "in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through and separate the new files orig_files", "new_file.write(line) new_file.close() if __name__ == '__main__': if len(sys.argv) == 1: main(os.getcwd()) elif len(sys.argv)", "= 0 new_file_name = orig_file.parent.joinpath(orig_file.stem + '-' + str(idx) + '.txt') new_file =", "into unique files We do this in two steps: 1. Move all existing", "\"\"\"Separates Altera's junky concatenated CSV files into unique files We do this in", "if __name__ == '__main__': if len(sys.argv) == 1: main(os.getcwd()) elif len(sys.argv) == 2:", "in orig_files: idx = 0 new_file_name = orig_file.parent.joinpath(orig_file.stem + '-' + str(idx) +", "main(root: str): root_path = Path(root) # Stash old files tmp_files = [f for", "files orig_files = [f for f in root_path.glob('*.tmp')] for orig_file in orig_files: idx", "all existing *.txt files to *.tmp files 2. 
Go through and break up", "steps: 1. Move all existing *.txt files to *.tmp files 2. Go through", "file into a new file \"\"\" import os import sys from pathlib import", "[f for f in root_path.glob('*.txt')] for tmp_file in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop", "encoding='cp1252') as orig: lines = orig.readlines() for line in lines: if line.startswith('Bank'): new_file.close()", "idx = idx + 1 new_file_name = orig_file.parent.joinpath( orig_file.stem + '-' + str(idx)", "f in root_path.glob('*.tmp')] for orig_file in orig_files: idx = 0 new_file_name = orig_file.parent.joinpath(orig_file.stem", "for tmp_file in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through and separate the new", "separate the new files orig_files = [f for f in root_path.glob('*.tmp')] for orig_file", "idx + 1 new_file_name = orig_file.parent.joinpath( orig_file.stem + '-' + str(idx) + '.txt')", "+ '-' + str(idx) + '.txt') new_file = open(new_file_name, 'w') new_file.write(line) else: new_file.write(line)", "= orig_file.parent.joinpath( orig_file.stem + '-' + str(idx) + '.txt') new_file = open(new_file_name, 'w')", "from pathlib import Path def main(root: str): root_path = Path(root) # Stash old", "line.startswith('Bank'): new_file.close() idx = idx + 1 new_file_name = orig_file.parent.joinpath( orig_file.stem + '-'", "len(sys.argv) == 1: main(os.getcwd()) elif len(sys.argv) == 2: main(sys.argv[1]) else: print(\"Wrong number of", "Go through and break up at the start of each CSV file into", "for f in root_path.glob('*.txt')] for tmp_file in tmp_files: os.rename(tmp_file, tmp_file.with_suffix('.tmp')) # Loop through", "== '__main__': if len(sys.argv) == 1: main(os.getcwd()) elif len(sys.argv) == 2: main(sys.argv[1]) else:", "break up at the start of each CSV file into a new file", "orig_file.parent.joinpath( orig_file.stem + '-' + str(idx) + '.txt') new_file = open(new_file_name, 'w') new_file.write(line)" ]
[ "global DOCROOT self.host = host self.port = int(port) DOCROOT = docroot def start(self):", "def start(self): self.httpd = EasyServer((self.host, self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't", "this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import BaseHTTPServer import SimpleHTTPServer", "of these calls are from log_message def address_string(self): return \"a.b.c.d\" # This produces", "threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang on exit self.server.start() #self.testServer() #TODO: figure this out", "'win32': sep = '' ret = '%s%s' % ( sep, DOCROOT.strip('/') ) #", "= '' ret = '%s%s' % ( sep, DOCROOT.strip('/') ) # Stub out", "urlparse from SocketServer import ThreadingMixIn DOCROOT = '.' class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address =", "API updates so we can update our stubbed files with the changes. if", "self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang on exit self.server.start() #self.testServer()", "FOUND: \" + webline.strip() def stop(self): if self.httpd: self.httpd.shutdown() self.httpd.server_close() __del__ = stop", "else: for fileName in fileList: if fileName == webline: found = True if", "Source Code Form is subject to the terms of the Mozilla Public #", "my local network that calls to this were timing out # I believe", "log_message def address_string(self): return \"a.b.c.d\" # This produces a LOT of noise def", "= '.' 
class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path):", "is '/' and os.path.join makes the '/' o = urlparse(path) sep = '/'", "found on my local network that calls to this were timing out #", "#!/usr/bin/python # # This Source Code Form is subject to the terms of", "can update our stubbed files with the changes. if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids", "def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT self.host = host self.port = int(port)", "we can update our stubbed files with the changes. if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0:", "I found on my local network that calls to this were timing out", "*args): pass class MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT self.host =", "if (found == False): print \"NOT FOUND: \" + webline.strip() def stop(self): if", "= '%s%s' % ( sep, DOCROOT.strip('/') ) # Stub out addons.mozilla.org search API,", "the terms of the Mozilla Public # License, v. 2.0. If a copy", "(self.host, self.port)) data = filehandle.readlines(); filehandle.close() for line in data: found = False", "out addons.mozilla.org search API, which is used when installing # add-ons. 
The version", "webline: found = True if (found == False): print \"NOT FOUND: \" +", "out # I believe all of these calls are from log_message def address_string(self):", "a LOT of noise def log_message(self, format, *args): pass class MozHttpd(object): def __init__(self,", "0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0: raise Exception('Searching for multiple IDs", "fileList: if fileName == webline: found = True if (found == False): print", "for fileName in fileList: if fileName == webline: found = True if (found", "from log_message def address_string(self): return \"a.b.c.d\" # This produces a LOT of noise", "# # This Source Code Form is subject to the terms of the", "= ids at_loc = ids.find('@') if at_loc > 0: base = ids[0:at_loc] ret", "not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import", "'.' class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): #", "subject to the terms of the Mozilla Public # License, v. 2.0. If", "the MPL was not distributed with this # file, You can obtain one", "path): # It appears that the default path is '/' and os.path.join makes", "the '/' o = urlparse(path) sep = '/' if sys.platform == 'win32': sep", "fileName in fileList: if fileName == webline: found = True if (found ==", "True if (found == False): print \"NOT FOUND: \" + webline.strip() def stop(self):", "__init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT self.host = host self.port = int(port) DOCROOT", "'/%s' % o.path.strip('/') return ret # I found on my local network that", "import ThreadingMixIn DOCROOT = '.' 
class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):", "threading import sys import os import urllib import re from urlparse import urlparse", "return ret # I found on my local network that calls to this", "LOT of noise def log_message(self, format, *args): pass class MozHttpd(object): def __init__(self, host=\"127.0.0.1\",", "and os.path.join makes the '/' o = urlparse(path) sep = '/' if sys.platform", "local network that calls to this were timing out # I believe all", "webline != \"\": if webline == \"Directory listing for\": found = True else:", "Stub out addons.mozilla.org search API, which is used when installing # add-ons. The", "\"\": if webline == \"Directory listing for\": found = True else: for fileName", "believe all of these calls are from log_message def address_string(self): return \"a.b.c.d\" #", "'' ret = '%s%s' % ( sep, DOCROOT.strip('/') ) # Stub out addons.mozilla.org", "'', line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\": if webline == \"Directory listing for\": found", "network that calls to this were timing out # I believe all of", "the Mozilla Public # License, v. 2.0. If a copy of the MPL", "MPL was not distributed with this # file, You can obtain one at", "= False # '@' denotes a symlink and we need to ignore it.", "== webline: found = True if (found == False): print \"NOT FOUND: \"", "is used when installing # add-ons. The version is hard-coded because we want", "urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0: raise Exception('Searching for multiple IDs is not supported.')", "line in data: found = False # '@' denotes a symlink and we", "that calls to this were timing out # I believe all of these", "fileName == webline: found = True if (found == False): print \"NOT FOUND:", "used when installing # add-ons. 
The version is hard-coded because we want tests", "docroot def start(self): self.httpd = EasyServer((self.host, self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) #", "BaseHTTPServer import SimpleHTTPServer import threading import sys import os import urllib import re", "== 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0: raise Exception('Searching for multiple", "appears that the default path is '/' and os.path.join makes the '/' o", "urllib import re from urlparse import urlparse from SocketServer import ThreadingMixIn DOCROOT =", "figure this out def testServer(self): fileList = os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' % (self.host,", "== 'win32': sep = '' ret = '%s%s' % ( sep, DOCROOT.strip('/') )", "path is '/' and os.path.join makes the '/' o = urlparse(path) sep =", "o.path.strip('/') return ret # I found on my local network that calls to", "base = ids[0:at_loc] ret += '/%s.xml' % base else: ret += '/%s' %", "self.port)) data = filehandle.readlines(); filehandle.close() for line in data: found = False #", "to the terms of the Mozilla Public # License, v. 2.0. If a", "This Source Code Form is subject to the terms of the Mozilla Public", "found = True else: for fileName in fileList: if fileName == webline: found", "IDs is not supported.') base = ids at_loc = ids.find('@') if at_loc >", "= urlparse(path) sep = '/' if sys.platform == 'win32': sep = '' ret", "hang on exit self.server.start() #self.testServer() #TODO: figure this out def testServer(self): fileList =", "SocketServer import ThreadingMixIn DOCROOT = '.' class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True class", "# don't hang on exit self.server.start() #self.testServer() #TODO: figure this out def testServer(self):", "need to ignore it. 
webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline !=", "If a copy of the MPL was not distributed with this # file,", "out def testServer(self): fileList = os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' % (self.host, self.port)) data", "import sys import os import urllib import re from urlparse import urlparse from", "= True else: for fileName in fileList: if fileName == webline: found =", "\"Directory listing for\": found = True else: for fileName in fileList: if fileName", "stubbed files with the changes. if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if", "MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): # It appears that the default path is '/'", "tests to fail when # the API updates so we can update our", "2.0. If a copy of the MPL was not distributed with this #", "copy of the MPL was not distributed with this # file, You can", "DOCROOT = docroot def start(self): self.httpd = EasyServer((self.host, self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever)", "sys.platform == 'win32': sep = '' ret = '%s%s' % ( sep, DOCROOT.strip('/')", "'/' and os.path.join makes the '/' o = urlparse(path) sep = '/' if", "can obtain one at http://mozilla.org/MPL/2.0/. 
import BaseHTTPServer import SimpleHTTPServer import threading import sys", "filehandle = urllib.urlopen('http://%s:%s' % (self.host, self.port)) data = filehandle.readlines(); filehandle.close() for line in", "class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): # It appears that the default path is", "Form is subject to the terms of the Mozilla Public # License, v.", "<gh_stars>1-10 #!/usr/bin/python # # This Source Code Form is subject to the terms", "if sys.platform == 'win32': sep = '' ret = '%s%s' % ( sep,", "EasyServer((self.host, self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang on exit self.server.start()", "urllib.urlopen('http://%s:%s' % (self.host, self.port)) data = filehandle.readlines(); filehandle.close() for line in data: found", "API, which is used when installing # add-ons. The version is hard-coded because", "ThreadingMixIn DOCROOT = '.' class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def", "search API, which is used when installing # add-ons. 
The version is hard-coded", "sys import os import urllib import re from urlparse import urlparse from SocketServer", "import threading import sys import os import urllib import re from urlparse import", "self.host = host self.port = int(port) DOCROOT = docroot def start(self): self.httpd =", "'%s%s' % ( sep, DOCROOT.strip('/') ) # Stub out addons.mozilla.org search API, which", "import urllib import re from urlparse import urlparse from SocketServer import ThreadingMixIn DOCROOT", "else: ret += '/%s' % o.path.strip('/') return ret # I found on my", "fileList = os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' % (self.host, self.port)) data = filehandle.readlines(); filehandle.close()", "import SimpleHTTPServer import threading import sys import os import urllib import re from", "distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import BaseHTTPServer", "print \"NOT FOUND: \" + webline.strip() def stop(self): if self.httpd: self.httpd.shutdown() self.httpd.server_close() __del__", "sep = '/' if sys.platform == 'win32': sep = '' ret = '%s%s'", "The version is hard-coded because we want tests to fail when # the", "for multiple IDs is not supported.') base = ids at_loc = ids.find('@') if", "% base else: ret += '/%s' % o.path.strip('/') return ret # I found", "= True if (found == False): print \"NOT FOUND: \" + webline.strip() def", "urlparse(path) sep = '/' if sys.platform == 'win32': sep = '' ret =", "timing out # I believe all of these calls are from log_message def", "http://mozilla.org/MPL/2.0/. import BaseHTTPServer import SimpleHTTPServer import threading import sys import os import urllib", "want tests to fail when # the API updates so we can update", "License, v. 2.0. If a copy of the MPL was not distributed with", "our stubbed files with the changes. 
if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):])", "== False): print \"NOT FOUND: \" + webline.strip() def stop(self): if self.httpd: self.httpd.shutdown()", "import urlparse from SocketServer import ThreadingMixIn DOCROOT = '.' class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address", "is hard-coded because we want tests to fail when # the API updates", "makes the '/' o = urlparse(path) sep = '/' if sys.platform == 'win32':", "!= \"\": if webline == \"Directory listing for\": found = True else: for", "EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): # It appears", "for line in data: found = False # '@' denotes a symlink and", "class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): # It", "in fileList: if fileName == webline: found = True if (found == False):", "found = True if (found == False): print \"NOT FOUND: \" + webline.strip()", "DOCROOT = '.' class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self,", "self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang on exit self.server.start() #self.testServer() #TODO: figure", "were timing out # I believe all of these calls are from log_message", "file, You can obtain one at http://mozilla.org/MPL/2.0/. import BaseHTTPServer import SimpleHTTPServer import threading", "on exit self.server.start() #self.testServer() #TODO: figure this out def testServer(self): fileList = os.listdir(DOCROOT)", "# License, v. 2.0. 
If a copy of the MPL was not distributed", "ids.find('@') if at_loc > 0: base = ids[0:at_loc] ret += '/%s.xml' % base", "% ( sep, DOCROOT.strip('/') ) # Stub out addons.mozilla.org search API, which is", "when # the API updates so we can update our stubbed files with", "# I believe all of these calls are from log_message def address_string(self): return", "is not supported.') base = ids at_loc = ids.find('@') if at_loc > 0:", "Exception('Searching for multiple IDs is not supported.') base = ids at_loc = ids.find('@')", "data: found = False # '@' denotes a symlink and we need to", "% o.path.strip('/') return ret # I found on my local network that calls", "we need to ignore it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline", "= re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\": if webline == \"Directory", "return \"a.b.c.d\" # This produces a LOT of noise def log_message(self, format, *args):", "listing for\": found = True else: for fileName in fileList: if fileName ==", "+= '/%s.xml' % base else: ret += '/%s' % o.path.strip('/') return ret #", "SimpleHTTPServer import threading import sys import os import urllib import re from urlparse", "to fail when # the API updates so we can update our stubbed", "translate_path(self, path): # It appears that the default path is '/' and os.path.join", "# file, You can obtain one at http://mozilla.org/MPL/2.0/. import BaseHTTPServer import SimpleHTTPServer import", "DOCROOT.strip('/') ) # Stub out addons.mozilla.org search API, which is used when installing", "at_loc > 0: base = ids[0:at_loc] ret += '/%s.xml' % base else: ret", "class MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT self.host = host self.port", "update our stubbed files with the changes. 
if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids =", "ret += '/%s.xml' % base else: ret += '/%s' % o.path.strip('/') return ret", "import re from urlparse import urlparse from SocketServer import ThreadingMixIn DOCROOT = '.'", "with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import BaseHTTPServer import", "addons.mozilla.org search API, which is used when installing # add-ons. The version is", "ret # I found on my local network that calls to this were", "ignore it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\": if", "def translate_path(self, path): # It appears that the default path is '/' and", "= ids[0:at_loc] ret += '/%s.xml' % base else: ret += '/%s' % o.path.strip('/')", "a symlink and we need to ignore it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '',", "ret = '%s%s' % ( sep, DOCROOT.strip('/') ) # Stub out addons.mozilla.org search", "calls are from log_message def address_string(self): return \"a.b.c.d\" # This produces a LOT", "'/' if sys.platform == 'win32': sep = '' ret = '%s%s' % (", "= ids.find('@') if at_loc > 0: base = ids[0:at_loc] ret += '/%s.xml' %", "so we can update our stubbed files with the changes. 
if o.path.find('/en-US/firefox/api/1.5/search/guid:') ==", "allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): # It appears that the", "filehandle.close() for line in data: found = False # '@' denotes a symlink", "True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): # It appears that the default path", "= urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0: raise Exception('Searching for multiple IDs is not", "# This Source Code Form is subject to the terms of the Mozilla", "noise def log_message(self, format, *args): pass class MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'):", "hard-coded because we want tests to fail when # the API updates so", "# add-ons. The version is hard-coded because we want tests to fail when", "at_loc = ids.find('@') if at_loc > 0: base = ids[0:at_loc] ret += '/%s.xml'", "address_string(self): return \"a.b.c.d\" # This produces a LOT of noise def log_message(self, format,", "#TODO: figure this out def testServer(self): fileList = os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' %", "calls to this were timing out # I believe all of these calls", "from urlparse import urlparse from SocketServer import ThreadingMixIn DOCROOT = '.' class EasyServer(ThreadingMixIn,", ") # Stub out addons.mozilla.org search API, which is used when installing #", "BaseHTTPServer.HTTPServer): allow_reuse_address = True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): # It appears that", "MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang on exit self.server.start() #self.testServer() #TODO:", "webline == \"Directory listing for\": found = True else: for fileName in fileList:", "when installing # add-ons. 
The version is hard-coded because we want tests to", "and we need to ignore it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if", "import BaseHTTPServer import SimpleHTTPServer import threading import sys import os import urllib import", "self.port = int(port) DOCROOT = docroot def start(self): self.httpd = EasyServer((self.host, self.port), MozRequestHandler)", "raise Exception('Searching for multiple IDs is not supported.') base = ids at_loc =", "host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT self.host = host self.port = int(port) DOCROOT =", "= int(port) DOCROOT = docroot def start(self): self.httpd = EasyServer((self.host, self.port), MozRequestHandler) self.server", "'@' denotes a symlink and we need to ignore it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\)", "if ids.count(',') > 0: raise Exception('Searching for multiple IDs is not supported.') base", "# '@' denotes a symlink and we need to ignore it. webline =", "one at http://mozilla.org/MPL/2.0/. import BaseHTTPServer import SimpleHTTPServer import threading import sys import os", "data = filehandle.readlines(); filehandle.close() for line in data: found = False # '@'", "]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\": if webline == \"Directory listing for\":", "changes. 
if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0: raise", "of noise def log_message(self, format, *args): pass class MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888,", "that the default path is '/' and os.path.join makes the '/' o =", "in data: found = False # '@' denotes a symlink and we need", "this out def testServer(self): fileList = os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' % (self.host, self.port))", "import os import urllib import re from urlparse import urlparse from SocketServer import", "found = False # '@' denotes a symlink and we need to ignore", "if webline == \"Directory listing for\": found = True else: for fileName in", "= EasyServer((self.host, self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang on exit", "symlink and we need to ignore it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@')", "produces a LOT of noise def log_message(self, format, *args): pass class MozHttpd(object): def", "all of these calls are from log_message def address_string(self): return \"a.b.c.d\" # This", "not supported.') base = ids at_loc = ids.find('@') if at_loc > 0: base", "self.httpd = EasyServer((self.host, self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang on", "0: raise Exception('Searching for multiple IDs is not supported.') base = ids at_loc", "at http://mozilla.org/MPL/2.0/. 
import BaseHTTPServer import SimpleHTTPServer import threading import sys import os import", "this were timing out # I believe all of these calls are from", "# Stub out addons.mozilla.org search API, which is used when installing # add-ons.", "\"a.b.c.d\" # This produces a LOT of noise def log_message(self, format, *args): pass", "(found == False): print \"NOT FOUND: \" + webline.strip() def stop(self): if self.httpd:", "urlparse import urlparse from SocketServer import ThreadingMixIn DOCROOT = '.' class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer):", "sep, DOCROOT.strip('/') ) # Stub out addons.mozilla.org search API, which is used when", "It appears that the default path is '/' and os.path.join makes the '/'", "re from urlparse import urlparse from SocketServer import ThreadingMixIn DOCROOT = '.' class", "for\": found = True else: for fileName in fileList: if fileName == webline:", "pass class MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT self.host = host", "int(port) DOCROOT = docroot def start(self): self.httpd = EasyServer((self.host, self.port), MozRequestHandler) self.server =", "= threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang on exit self.server.start() #self.testServer() #TODO: figure this", "\"NOT FOUND: \" + webline.strip() def stop(self): if self.httpd: self.httpd.shutdown() self.httpd.server_close() __del__ =", "DOCROOT self.host = host self.port = int(port) DOCROOT = docroot def start(self): self.httpd", "# This produces a LOT of noise def log_message(self, format, *args): pass class", "start(self): self.httpd = EasyServer((self.host, self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True) # don't hang", "of the Mozilla Public # License, v. 2.0. If a copy of the", "installing # add-ons. 
The version is hard-coded because we want tests to fail", "of the MPL was not distributed with this # file, You can obtain", "updates so we can update our stubbed files with the changes. if o.path.find('/en-US/firefox/api/1.5/search/guid:')", "ids[0:at_loc] ret += '/%s.xml' % base else: ret += '/%s' % o.path.strip('/') return", "MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT self.host = host self.port =", "> 0: raise Exception('Searching for multiple IDs is not supported.') base = ids", "= filehandle.readlines(); filehandle.close() for line in data: found = False # '@' denotes", "o = urlparse(path) sep = '/' if sys.platform == 'win32': sep = ''", "ids at_loc = ids.find('@') if at_loc > 0: base = ids[0:at_loc] ret +=", "= docroot def start(self): self.httpd = EasyServer((self.host, self.port), MozRequestHandler) self.server = threading.Thread(target=self.httpd.serve_forever) self.server.setDaemon(True)", "+= '/%s' % o.path.strip('/') return ret # I found on my local network", "if webline != \"\": if webline == \"Directory listing for\": found = True", "files with the changes. if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',')", "ids.count(',') > 0: raise Exception('Searching for multiple IDs is not supported.') base =", "'/%s.xml' % base else: ret += '/%s' % o.path.strip('/') return ret # I", "supported.') base = ids at_loc = ids.find('@') if at_loc > 0: base =", "self.server.setDaemon(True) # don't hang on exit self.server.start() #self.testServer() #TODO: figure this out def", "testServer(self): fileList = os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' % (self.host, self.port)) data = filehandle.readlines();", "False # '@' denotes a symlink and we need to ignore it. 
webline", "0: base = ids[0:at_loc] ret += '/%s.xml' % base else: ret += '/%s'", "multiple IDs is not supported.') base = ids at_loc = ids.find('@') if at_loc", "Code Form is subject to the terms of the Mozilla Public # License,", "with the changes. if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') >", "= host self.port = int(port) DOCROOT = docroot def start(self): self.httpd = EasyServer((self.host,", "are from log_message def address_string(self): return \"a.b.c.d\" # This produces a LOT of", "os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' % (self.host, self.port)) data = filehandle.readlines(); filehandle.close() for line", "> 0: base = ids[0:at_loc] ret += '/%s.xml' % base else: ret +=", "def log_message(self, format, *args): pass class MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global", "# It appears that the default path is '/' and os.path.join makes the", "re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\": if webline == \"Directory listing", "we want tests to fail when # the API updates so we can", "== \"Directory listing for\": found = True else: for fileName in fileList: if", "os import urllib import re from urlparse import urlparse from SocketServer import ThreadingMixIn", "ret += '/%s' % o.path.strip('/') return ret # I found on my local", "if fileName == webline: found = True if (found == False): print \"NOT", "base = ids at_loc = ids.find('@') if at_loc > 0: base = ids[0:at_loc]", "host self.port = int(port) DOCROOT = docroot def start(self): self.httpd = EasyServer((self.host, self.port),", "fail when # the API updates so we can update our stubbed files", "line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\": if webline == \"Directory listing for\": found =", "if at_loc > 0: 
base = ids[0:at_loc] ret += '/%s.xml' % base else:", "these calls are from log_message def address_string(self): return \"a.b.c.d\" # This produces a", "filehandle.readlines(); filehandle.close() for line in data: found = False # '@' denotes a", "which is used when installing # add-ons. The version is hard-coded because we", "def address_string(self): return \"a.b.c.d\" # This produces a LOT of noise def log_message(self,", "was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/.", "to this were timing out # I believe all of these calls are", "webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\": if webline ==", "False): print \"NOT FOUND: \" + webline.strip() def stop(self): if self.httpd: self.httpd.shutdown() self.httpd.server_close()", "self.server.start() #self.testServer() #TODO: figure this out def testServer(self): fileList = os.listdir(DOCROOT) filehandle =", "exit self.server.start() #self.testServer() #TODO: figure this out def testServer(self): fileList = os.listdir(DOCROOT) filehandle", "Public # License, v. 2.0. If a copy of the MPL was not", "terms of the Mozilla Public # License, v. 2.0. If a copy of", "a copy of the MPL was not distributed with this # file, You", "I believe all of these calls are from log_message def address_string(self): return \"a.b.c.d\"", "it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\": if webline", "is subject to the terms of the Mozilla Public # License, v. 
2.0.", "port=8888, docroot='.'): global DOCROOT self.host = host self.port = int(port) DOCROOT = docroot", "log_message(self, format, *args): pass class MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT", "ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0: raise Exception('Searching for multiple IDs is", "because we want tests to fail when # the API updates so we", "denotes a symlink and we need to ignore it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>',", "Mozilla Public # License, v. 2.0. If a copy of the MPL was", "'/' o = urlparse(path) sep = '/' if sys.platform == 'win32': sep =", "obtain one at http://mozilla.org/MPL/2.0/. import BaseHTTPServer import SimpleHTTPServer import threading import sys import", "You can obtain one at http://mozilla.org/MPL/2.0/. import BaseHTTPServer import SimpleHTTPServer import threading import", "( sep, DOCROOT.strip('/') ) # Stub out addons.mozilla.org search API, which is used", "the changes. if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0:", "# I found on my local network that calls to this were timing", "= True class MozRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): def translate_path(self, path): # It appears that the default", "v. 2.0. 
If a copy of the MPL was not distributed with this", "base else: ret += '/%s' % o.path.strip('/') return ret # I found on", "= urllib.urlopen('http://%s:%s' % (self.host, self.port)) data = filehandle.readlines(); filehandle.close() for line in data:", "% (self.host, self.port)) data = filehandle.readlines(); filehandle.close() for line in data: found =", "= '/' if sys.platform == 'win32': sep = '' ret = '%s%s' %", "= os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' % (self.host, self.port)) data = filehandle.readlines(); filehandle.close() for", "add-ons. The version is hard-coded because we want tests to fail when #", "the API updates so we can update our stubbed files with the changes.", "to ignore it. webline = re.sub('\\<[a-zA-Z0-9\\-\\_\\.\\=\\\"\\'\\/\\\\\\%\\!\\@\\#\\$\\^\\&\\*\\(\\) ]*\\>', '', line.strip('\\n')).strip('/').strip().strip('@') if webline != \"\":", "docroot='.'): global DOCROOT self.host = host self.port = int(port) DOCROOT = docroot def", "don't hang on exit self.server.start() #self.testServer() #TODO: figure this out def testServer(self): fileList", "o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0: raise Exception('Searching for", "sep = '' ret = '%s%s' % ( sep, DOCROOT.strip('/') ) # Stub", "True else: for fileName in fileList: if fileName == webline: found = True", "if o.path.find('/en-US/firefox/api/1.5/search/guid:') == 0: ids = urllib.unquote(o.path[len('/en-US/firefox/api/1.5/search/guid:'):]) if ids.count(',') > 0: raise Exception('Searching", "on my local network that calls to this were timing out # I", "os.path.join makes the '/' o = urlparse(path) sep = '/' if sys.platform ==", "format, *args): pass class MozHttpd(object): def __init__(self, host=\"127.0.0.1\", port=8888, docroot='.'): global DOCROOT self.host", "# the API updates so we can update our stubbed files with the", "def testServer(self): fileList = 
os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s' % (self.host, self.port)) data =", "the default path is '/' and os.path.join makes the '/' o = urlparse(path)", "from SocketServer import ThreadingMixIn DOCROOT = '.' class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer): allow_reuse_address = True", "default path is '/' and os.path.join makes the '/' o = urlparse(path) sep", "version is hard-coded because we want tests to fail when # the API", "#self.testServer() #TODO: figure this out def testServer(self): fileList = os.listdir(DOCROOT) filehandle = urllib.urlopen('http://%s:%s'", "This produces a LOT of noise def log_message(self, format, *args): pass class MozHttpd(object):" ]